package org.opendaylight.controller.forwarding.staticrouting;
+import java.io.Serializable;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.Map;
+import org.opendaylight.controller.hosttracker.hostAware.HostNodeConnector;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.packet.BitBufferHelper;
import org.opendaylight.controller.sal.utils.NodeConnectorCreator;
import org.opendaylight.controller.sal.utils.NodeCreator;
-
-import org.opendaylight.controller.hosttracker.hostAware.HostNodeConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class defines a static route object.
*/
-public class StaticRoute {
+public class StaticRoute implements Serializable{
+ private static final long serialVersionUID = 1L;
protected static final Logger logger = LoggerFactory
.getLogger(StaticRoute.class);
/**
* This Enum defines the possible types for the next hop address.
*/
- public enum NextHopType {
+ public enum NextHopType implements Serializable {
IPADDRESS("nexthop-ip"), SWITCHPORT("nexthop-interface");
private NextHopType(String name) {
this.name = name;
import org.opendaylight.controller.sal.utils.NetUtils;
import org.opendaylight.controller.sal.utils.NodeConnectorCreator;
import org.opendaylight.controller.sal.utils.ServiceHelper;
+import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.switchmanager.ISwitchManager;
import org.opendaylight.controller.switchmanager.Switch;
* Configuration Java Object which represents flow configuration information
* for the Forwarding Rules Manager.
*/
-
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class FlowConfig implements Serializable {
private static final long serialVersionUID = 1L;
private static final Logger log = LoggerFactory.getLogger(FlowConfig.class);
- private static final String staticFlowsGroup = "**StaticFlows";
+ public static final String staticFlowsGroup = "**StaticFlows";
+ public static final String internalStaticFlowsGroup = "**InternalStaticFlows";
+ public static final String internalStaticFlowBegin = "**";
private boolean dynamic;
private String status;
public FlowConfig() {
}
- public FlowConfig(String installInHw, String name, Node node,
- String priority, String cookie, String ingressPort,
- String portGroup, String vlanId, String vlanPriority,
- String etherType, String srcMac, String dstMac, String protocol,
- String tosBits, String srcIP, String dstIP, String tpSrc,
- String tpDst, String idleTimeout, String hardTimeout,
- List<String> actions) {
+ public FlowConfig(String installInHw, String name, Node node, String priority, String cookie, String ingressPort,
+ String portGroup, String vlanId, String vlanPriority, String etherType, String srcMac, String dstMac,
+ String protocol, String tosBits, String srcIP, String dstIP, String tpSrc, String tpDst,
+ String idleTimeout, String hardTimeout, List<String> actions) {
super();
this.installInHw = installInHw;
this.name = name;
public boolean isInternalFlow() {
// Controller-generated static flows have names starting with "**"
- return (this.name != null && this.name.startsWith("**"));
+ return (this.name != null && this.name.startsWith(FlowConfig.internalStaticFlowBegin));
}
public String getName() {
@Override
public String toString() {
- return "FlowConfig [dynamic=" + dynamic + ", status=" + status
- + ", installInHw=" + installInHw + ", name=" + name
- + ", switchId=" + node + ", ingressPort=" + ingressPort
- + ", portGroup=" + portGroup + ", etherType=" + etherType
- + ", priority=" + priority + ", vlanId=" + vlanId
- + ", vlanPriority=" + vlanPriority + ", dlSrc=" + dlSrc
- + ", dlDst=" + dlDst + ", nwSrc=" + nwSrc + ", nwDst=" + nwDst
- + ", protocol=" + protocol + ", tosBits=" + tosBits
- + ", tpSrc=" + tpSrc + ", tpDst=" + tpDst + ", cookie="
- + cookie + ", idleTimeout=" + idleTimeout + ", hardTimeout="
- + hardTimeout + ", actions=" + actions + "]";
+ return "FlowConfig [dynamic=" + dynamic + ", status=" + status + ", installInHw=" + installInHw + ", name="
+ + name + ", switchId=" + node + ", ingressPort=" + ingressPort + ", portGroup=" + portGroup
+ + ", etherType=" + etherType + ", priority=" + priority + ", vlanId=" + vlanId + ", vlanPriority="
+ + vlanPriority + ", dlSrc=" + dlSrc + ", dlDst=" + dlDst + ", nwSrc=" + nwSrc + ", nwDst=" + nwDst
+ + ", protocol=" + protocol + ", tosBits=" + tosBits + ", tpSrc=" + tpSrc + ", tpDst=" + tpDst
+ + ", cookie=" + cookie + ", idleTimeout=" + idleTimeout + ", hardTimeout=" + hardTimeout + ", actions="
+ + actions + "]";
}
public void setPortGroup(String portGroup) {
}
public boolean isIPv6() {
- if (NetUtils.isIPv6AddressValid(this.getSrcIp())
- || NetUtils.isIPv6AddressValid(this.getDstIp())) {
- return true;
- }
- return false;
+ return NetUtils.isIPv6AddressValid(this.getSrcIp()) || NetUtils.isIPv6AddressValid(this.getDstIp());
}
public List<String> getActions() {
result = prime * result + ((dlDst == null) ? 0 : dlDst.hashCode());
result = prime * result + ((dlSrc == null) ? 0 : dlSrc.hashCode());
result = prime * result + (dynamic ? 1231 : 1237);
- result = prime * result
- + ((etherType == null) ? 0 : etherType.hashCode());
- result = prime * result
- + ((ingressPort == null) ? 0 : ingressPort.hashCode());
+ result = prime * result + ((etherType == null) ? 0 : etherType.hashCode());
+ result = prime * result + ((ingressPort == null) ? 0 : ingressPort.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((nwDst == null) ? 0 : nwDst.hashCode());
result = prime * result + ((nwSrc == null) ? 0 : nwSrc.hashCode());
- result = prime * result
- + ((portGroup == null) ? 0 : portGroup.hashCode());
- result = prime * result
- + ((priority == null) ? 0 : priority.hashCode());
- result = prime * result
- + ((protocol == null) ? 0 : protocol.hashCode());
+ result = prime * result + ((portGroup == null) ? 0 : portGroup.hashCode());
+ result = prime * result + ((priority == null) ? 0 : priority.hashCode());
+ result = prime * result + ((protocol == null) ? 0 : protocol.hashCode());
result = prime * result + ((node == null) ? 0 : node.hashCode());
result = prime * result + ((tosBits == null) ? 0 : tosBits.hashCode());
result = prime * result + ((tpDst == null) ? 0 : tpDst.hashCode());
result = prime * result + ((tpSrc == null) ? 0 : tpSrc.hashCode());
result = prime * result + ((vlanId == null) ? 0 : vlanId.hashCode());
- result = prime * result
- + ((vlanPriority == null) ? 0 : vlanPriority.hashCode());
- result = prime * result
- + ((idleTimeout == null) ? 0 : idleTimeout.hashCode());
- result = prime * result
- + ((hardTimeout == null) ? 0 : hardTimeout.hashCode());
+ result = prime * result + ((vlanPriority == null) ? 0 : vlanPriority.hashCode());
+ result = prime * result + ((idleTimeout == null) ? 0 : idleTimeout.hashCode());
+ result = prime * result + ((hardTimeout == null) ? 0 : hardTimeout.hashCode());
return result;
}
return false;
}
- Pattern macPattern = Pattern
- .compile("([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}");
+ Pattern macPattern = Pattern.compile("([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}");
Matcher mm = macPattern.matcher(mac);
if (!mm.matches()) {
- log.debug(
- "Ethernet address {} is not valid. Example: 00:05:b9:7c:81:5f",
- mac);
+ log.debug("Ethernet address {} is not valid. Example: 00:05:b9:7c:81:5f", mac);
return false;
}
return true;
return ((to >= 0) && (to <= 0xffff));
}
- private boolean conflictWithContainerFlow(IContainer container,
- StringBuffer resultStr) {
+ private Status conflictWithContainerFlow(IContainer container) {
// No conflict possible in the default container
if (container.getName().equals(GlobalConstants.DEFAULT.toString())) {
- return false;
+ return new Status(StatusCode.SUCCESS);
}
// No container flow = no conflict
List<ContainerFlow> cFlowList = container.getContainerFlows();
if (((cFlowList == null)) || cFlowList.isEmpty()) {
- return false;
+ return new Status(StatusCode.SUCCESS);
}
// Check against each container's flow
for (ContainerFlow cFlow : cFlowList) {
if (cFlow.allowsFlow(flow)) {
log.trace("Config is congruent with at least one container flow");
- return false;
+ return new Status(StatusCode.SUCCESS);
}
}
String msg = "Flow Config conflicts with all existing container flows";
- resultStr.append(msg);
log.trace(msg);
- return true;
+ return new Status(StatusCode.BADREQUEST, msg);
}
- public boolean isValid(IContainer container, StringBuffer resultStr) {
+ public Status validate(IContainer container) {
EtherIPType etype = EtherIPType.ANY;
EtherIPType ipsrctype = EtherIPType.ANY;
EtherIPType ipdsttype = EtherIPType.ANY;
- String containerName = (container == null) ? GlobalConstants.DEFAULT
- .toString() : container.getName();
- ISwitchManager switchManager = (ISwitchManager) ServiceHelper
- .getInstance(ISwitchManager.class, containerName, this);
+ String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container.getName();
+ ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class, containerName,
+ this);
Switch sw = null;
try {
- if (name == null) {
- resultStr.append(String.format("Name is null"));
- return false;
+ if (name == null || name.trim().isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Invalid name");
}
+
if (node == null) {
- resultStr.append(String.format("Node is null"));
- return false;
+ return new Status(StatusCode.BADREQUEST, "Node is null");
}
+
if (switchManager != null) {
for (Switch device : switchManager.getNetworkDevices()) {
if (device.getNode().equals(node)) {
}
}
if (sw == null) {
- resultStr.append(String.format("Node %s not found", node));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Node %s not found", node));
}
} else {
log.debug("switchmanager is not set yet");
}
if (priority != null) {
- if (Integer.decode(priority) < 0
- || (Integer.decode(priority) > 65535)) {
- resultStr.append(String.format(
- "priority %s is not in the range 0 - 65535",
+ if (Integer.decode(priority) < 0 || (Integer.decode(priority) > 65535)) {
+ return new Status(StatusCode.BADREQUEST, String.format("priority %s is not in the range 0 - 65535",
priority));
- return false;
}
}
if (ingressPort != null) {
Short port = Short.decode(ingressPort);
if (isPortValid(sw, port) == false) {
- resultStr
- .append(String
- .format("Ingress port %d is not valid for the Switch",
- port));
- if ((container != null)
- && !container.getName().equals(
- GlobalConstants.DEFAULT.toString())) {
- resultStr
- .append(" in Container " + container.getName());
+ String msg = String.format("Ingress port %d is not valid for the Switch", port);
+ if ((container != null) && !container.getName().equals(GlobalConstants.DEFAULT.toString())) {
+ msg += " in Container " + container.getName();
}
- return false;
+ return new Status(StatusCode.BADREQUEST, msg);
}
}
if ((vlanId != null) && !isVlanIdValid(vlanId)) {
- resultStr.append(String.format(
- "Vlan ID %s is not in the range 0 - 4095", vlanId));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Vlan ID %s is not in the range 0 - 4095",
+ vlanId));
}
if ((vlanPriority != null) && !isVlanPriorityValid(vlanPriority)) {
- resultStr.append(String.format(
- "Vlan priority %s is not in the range 0 - 7",
+ return new Status(StatusCode.BADREQUEST, String.format("Vlan priority %s is not in the range 0 - 7",
vlanPriority));
- return false;
}
-
if (etherType != null) {
int type = Integer.decode(etherType);
if ((type < 0) || (type > 0xffff)) {
- resultStr.append(String.format(
- "Ethernet type %s is not valid", etherType));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Ethernet type %s is not valid", etherType));
} else {
if (type == 0x800) {
etype = EtherIPType.V4;
}
if ((tosBits != null) && !isTOSBitsValid(tosBits)) {
- resultStr.append(String.format(
- "IP ToS bits %s is not in the range 0 - 63", tosBits));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("IP ToS bits %s is not in the range 0 - 63",
+ tosBits));
}
if ((tpSrc != null) && !isTpPortValid(tpSrc)) {
- resultStr.append(String.format(
- "Transport source port %s is not valid", tpSrc));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Transport source port %s is not valid", tpSrc));
}
+
if ((tpDst != null) && !isTpPortValid(tpDst)) {
- resultStr.append(String.format(
- "Transport destination port %s is not valid", tpDst));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Transport destination port %s is not valid",
+ tpDst));
}
if ((dlSrc != null) && !isL2AddressValid(dlSrc)) {
- resultStr
- .append(String
- .format("Ethernet source address %s is not valid. Example: 00:05:b9:7c:81:5f",
- dlSrc));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Ethernet source address %s is not valid. Example: 00:05:b9:7c:81:5f", dlSrc));
}
if ((dlDst != null) && !isL2AddressValid(dlDst)) {
- resultStr
- .append(String
- .format("Ethernet destination address %s is not valid. Example: 00:05:b9:7c:81:5f",
- dlDst));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Ethernet destination address %s is not valid. Example: 00:05:b9:7c:81:5f", dlDst));
}
if (nwSrc != null) {
} else if (NetUtils.isIPv6AddressValid(nwSrc)) {
ipsrctype = EtherIPType.V6;
} else {
- resultStr.append(String.format(
- "IP source address %s is not valid", nwSrc));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("IP source address %s is not valid", nwSrc));
}
}
} else if (NetUtils.isIPv6AddressValid(nwDst)) {
ipdsttype = EtherIPType.V6;
} else {
- resultStr.append(String.format(
- "IP destination address %s is not valid", nwDst));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("IP destination address %s is not valid",
+ nwDst));
}
}
if (etype != EtherIPType.ANY) {
if ((ipsrctype != EtherIPType.ANY) && (ipsrctype != etype)) {
- resultStr.append(String
- .format("Type mismatch between Ethernet & Src IP"));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Type mismatch between Ethernet & Src IP"));
}
if ((ipdsttype != EtherIPType.ANY) && (ipdsttype != etype)) {
- resultStr.append(String
- .format("Type mismatch between Ethernet & Dst IP"));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Type mismatch between Ethernet & Dst IP"));
}
}
if (ipsrctype != ipdsttype) {
if (!((ipsrctype == EtherIPType.ANY) || (ipdsttype == EtherIPType.ANY))) {
- resultStr
- .append(String.format("IP Src Dest Type mismatch"));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("IP Src Dest Type mismatch"));
}
}
if ((idleTimeout != null) && !isTimeoutValid(idleTimeout)) {
- resultStr.append(String.format(
- "Idle Timeout value %s is not valid", idleTimeout));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Idle Timeout value %s is not valid",
+ idleTimeout));
}
if ((hardTimeout != null) && !isTimeoutValid(hardTimeout)) {
- resultStr.append(String.format(
- "Hard Timeout value %s is not valid", hardTimeout));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Hard Timeout value %s is not valid",
+ hardTimeout));
}
Matcher sstr;
sstr = Pattern.compile("OUTPUT=(.*)").matcher(actiongrp);
if (sstr.matches()) {
for (String t : sstr.group(1).split(",")) {
- Matcher n = Pattern.compile("(?:(\\d+))")
- .matcher(t);
+ Matcher n = Pattern.compile("(?:(\\d+))").matcher(t);
if (n.matches()) {
if (n.group(1) != null) {
Short port = Short.parseShort(n.group(1));
if (isPortValid(sw, port) == false) {
- resultStr
- .append(String
- .format("Output port %d is not valid for this switch",
- port));
+ String msg = String.format("Output port %d is not valid for this switch", port);
if ((container != null)
- && !container.getName().equals(
- GlobalConstants.DEFAULT
- .toString())) {
- resultStr.append(" in Container "
- + container.getName());
+ && !container.getName().equals(GlobalConstants.DEFAULT.toString())) {
+ msg += " in Container " + container.getName();
}
- return false;
+ return new Status(StatusCode.BADREQUEST, msg);
}
}
}
continue;
}
// Check FLOOD action (not allowed inside a container)
- sstr = Pattern.compile(ActionType.FLOOD.toString())
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.FLOOD.toString()).matcher(actiongrp);
if (sstr.matches()) {
if (container != null) {
- resultStr.append(String.format(
- "flood is not allowed in container %s",
- container.getName()));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "flood is not allowed in container %s", container.getName()));
}
continue;
}
// Check src IP
- sstr = Pattern.compile(
- ActionType.SET_NW_SRC.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
if (!NetUtils.isIPv4AddressValid(sstr.group(1))) {
- resultStr.append(String.format(
- "IP source address %s is not valid",
+ return new Status(StatusCode.BADREQUEST, String.format("IP source address %s is not valid",
sstr.group(1)));
- return false;
}
continue;
}
// Check dst IP
- sstr = Pattern.compile(
- ActionType.SET_NW_DST.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
if (!NetUtils.isIPv4AddressValid(sstr.group(1))) {
- resultStr.append(String.format(
- "IP destination address %s is not valid",
- sstr.group(1)));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "IP destination address %s is not valid", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_VLAN_ID.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_VLAN_ID.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isVlanIdValid(sstr.group(1))) {
- resultStr.append(String.format(
- "Vlan ID %s is not in the range 0 - 4095",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isVlanIdValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Vlan ID %s is not in the range 0 - 4095", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_VLAN_PCP.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_VLAN_PCP.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isVlanPriorityValid(sstr.group(1))) {
- resultStr
- .append(String
- .format("Vlan priority %s is not in the range 0 - 7",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isVlanPriorityValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Vlan priority %s is not in the range 0 - 7", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_DL_SRC.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_DL_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isL2AddressValid(sstr.group(1))) {
- resultStr
- .append(String
- .format("Ethernet source address %s is not valid. Example: 00:05:b9:7c:81:5f",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isL2AddressValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Ethernet source address %s is not valid. Example: 00:05:b9:7c:81:5f",
+ sstr.group(1)));
}
continue;
}
-
- sstr = Pattern.compile(
- ActionType.SET_DL_DST.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_DL_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isL2AddressValid(sstr.group(1))) {
- resultStr
- .append(String
- .format("Ethernet destination address %s is not valid. Example: 00:05:b9:7c:81:5f",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isL2AddressValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Ethernet destination address %s is not valid. Example: 00:05:b9:7c:81:5f",
+ sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NW_TOS.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_TOS.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isTOSBitsValid(sstr.group(1))) {
- resultStr
- .append(String
- .format("IP ToS bits %s is not in the range 0 - 63",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isTOSBitsValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "IP ToS bits %s is not in the range 0 - 63", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_TP_SRC.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_TP_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isTpPortValid(sstr.group(1))) {
- resultStr.append(String.format(
- "Transport source port %s is not valid",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isTpPortValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Transport source port %s is not valid", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_TP_DST.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_TP_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- if ((sstr.group(1) != null)
- && !isTpPortValid(sstr.group(1))) {
- resultStr
- .append(String
- .format("Transport destination port %s is not valid",
- sstr.group(1)));
- return false;
+ if ((sstr.group(1) != null) && !isTpPortValid(sstr.group(1))) {
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "Transport destination port %s is not valid", sstr.group(1)));
}
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NEXT_HOP.toString() + "=(.*)")
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NEXT_HOP.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
if (!NetUtils.isIPAddressValid(sstr.group(1))) {
- resultStr.append(String.format(
- "IP destination address %s is not valid",
- sstr.group(1)));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format(
+ "IP destination address %s is not valid", sstr.group(1)));
}
continue;
}
}
}
// Check against the container flow
- if ((container != null)
- && conflictWithContainerFlow(container, resultStr)) {
- return false;
+ Status status;
+ if ((container != null) && !(status = conflictWithContainerFlow(container)).isSuccess()) {
+ return status;
}
} catch (NumberFormatException e) {
- resultStr.append(String.format("Invalid number format %s",
- e.getMessage()));
- return false;
+ return new Status(StatusCode.BADREQUEST, String.format("Invalid number format %s", e.getMessage()));
}
- return true;
+ return new Status(StatusCode.SUCCESS);
}
public FlowEntry getFlowEntry() {
- return new FlowEntry(FlowConfig.staticFlowsGroup, this.name,
- this.getFlow(), this.getNode());
+ return new FlowEntry(FlowConfig.staticFlowsGroup, this.name, this.getFlow(), this.getNode());
}
public Flow getFlow() {
Match match = new Match();
if (this.ingressPort != null) {
- match.setField(
- MatchType.IN_PORT,
- NodeConnectorCreator.createOFNodeConnector(
- Short.parseShort(ingressPort), getNode()));
+ match.setField(MatchType.IN_PORT,
+ NodeConnectorCreator.createOFNodeConnector(Short.parseShort(ingressPort), getNode()));
}
if (this.dlSrc != null) {
- match.setField(MatchType.DL_SRC,
- HexEncode.bytesFromHexString(this.dlSrc));
+ match.setField(MatchType.DL_SRC, HexEncode.bytesFromHexString(this.dlSrc));
}
if (this.dlDst != null) {
- match.setField(MatchType.DL_DST,
- HexEncode.bytesFromHexString(this.dlDst));
+ match.setField(MatchType.DL_DST, HexEncode.bytesFromHexString(this.dlDst));
}
if (this.etherType != null) {
- match.setField(MatchType.DL_TYPE, Integer.decode(etherType)
- .shortValue());
+ match.setField(MatchType.DL_TYPE, Integer.decode(etherType).shortValue());
}
if (this.vlanId != null) {
match.setField(MatchType.DL_VLAN, Short.parseShort(this.vlanId));
}
if (this.vlanPriority != null) {
- match.setField(MatchType.DL_VLAN_PR,
- Byte.parseByte(this.vlanPriority));
+ match.setField(MatchType.DL_VLAN_PR, Byte.parseByte(this.vlanPriority));
}
if (this.nwSrc != null) {
String parts[] = this.nwSrc.split("/");
InetAddress ip = NetUtils.parseInetAddress(parts[0]);
InetAddress mask = null;
+ int maskLen = 0;
if (parts.length > 1) {
- int maskLen = Integer.parseInt(parts[1]);
- mask = NetUtils.getInetNetworkMask(maskLen,
- ip instanceof Inet6Address);
+ maskLen = Integer.parseInt(parts[1]);
+ } else {
+ maskLen = (ip instanceof Inet6Address) ? 128 : 32;
}
+ mask = NetUtils.getInetNetworkMask(maskLen, ip instanceof Inet6Address);
match.setField(MatchType.NW_SRC, ip, mask);
}
if (this.nwDst != null) {
String parts[] = this.nwDst.split("/");
InetAddress ip = NetUtils.parseInetAddress(parts[0]);
InetAddress mask = null;
+ int maskLen = 0;
if (parts.length > 1) {
- int maskLen = Integer.parseInt(parts[1]);
- mask = NetUtils.getInetNetworkMask(maskLen,
- ip instanceof Inet6Address);
+ maskLen = Integer.parseInt(parts[1]);
+ } else {
+ maskLen = (ip instanceof Inet6Address) ? 128 : 32;
}
+ mask = NetUtils.getInetNetworkMask(maskLen, ip instanceof Inet6Address);
match.setField(MatchType.NW_DST, ip, mask);
}
if (this.protocol != null) {
- match.setField(MatchType.NW_PROTO,
- IPProtocols.getProtocolNumberByte(this.protocol));
+ match.setField(MatchType.NW_PROTO, IPProtocols.getProtocolNumberByte(this.protocol));
}
if (this.tosBits != null) {
match.setField(MatchType.NW_TOS, Byte.parseByte(this.tosBits));
}
if (this.tpSrc != null) {
- match.setField(MatchType.TP_SRC, Integer.valueOf(this.tpSrc)
- .shortValue());
+ match.setField(MatchType.TP_SRC, Integer.valueOf(this.tpSrc).shortValue());
}
if (this.tpDst != null) {
- match.setField(MatchType.TP_DST, Integer.valueOf(this.tpDst)
- .shortValue());
+ match.setField(MatchType.TP_DST, Integer.valueOf(this.tpDst).shortValue());
}
Flow flow = new Flow(match, getActionList());
return this.node.equals(node);
}
- public void toggleStatus() {
- installInHw = (installInHw == null) ? "true" : (installInHw
- .equals("true")) ? "false" : "true";
+ public void toggleInstallation() {
+ installInHw = (installInHw == null) ? "true" : (installInHw.equals("true")) ? "false" : "true";
}
/*
if (actions != null) {
Matcher sstr;
for (String actiongrp : actions) {
- sstr = Pattern.compile(ActionType.OUTPUT + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.OUTPUT + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
for (String t : sstr.group(1).split(",")) {
Matcher n = Pattern.compile("(?:(\\d+))").matcher(t);
if (n.matches()) {
if (n.group(1) != null) {
short ofPort = Short.parseShort(n.group(1));
- actionList.add(new Output(NodeConnectorCreator
- .createOFNodeConnector(ofPort,
- this.getNode())));
+ actionList.add(new Output(NodeConnectorCreator.createOFNodeConnector(ofPort,
+ this.getNode())));
}
}
}
continue;
}
- sstr = Pattern.compile(ActionType.DROP.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.DROP.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new Drop());
continue;
}
- sstr = Pattern.compile(ActionType.LOOPBACK.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.LOOPBACK.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new Loopback());
continue;
}
- sstr = Pattern.compile(ActionType.FLOOD.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.FLOOD.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new Flood());
continue;
}
- sstr = Pattern.compile(ActionType.SW_PATH.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SW_PATH.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new SwPath());
continue;
}
- sstr = Pattern.compile(ActionType.HW_PATH.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.HW_PATH.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new HwPath());
continue;
}
- sstr = Pattern.compile(ActionType.CONTROLLER.toString())
- .matcher(actiongrp);
+ sstr = Pattern.compile(ActionType.CONTROLLER.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new Controller());
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_VLAN_ID.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_VLAN_ID.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetVlanId(
- Short.parseShort(sstr.group(1))));
+ actionList.add(new SetVlanId(Short.parseShort(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_VLAN_PCP.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_VLAN_PCP.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList
- .add(new SetVlanPcp(Byte.parseByte(sstr.group(1))));
+ actionList.add(new SetVlanPcp(Byte.parseByte(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(ActionType.POP_VLAN.toString()).matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.POP_VLAN.toString()).matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new PopVlan());
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_DL_SRC.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_DL_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetDlSrc(HexEncode
- .bytesFromHexString(sstr.group(1))));
+ actionList.add(new SetDlSrc(HexEncode.bytesFromHexString(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_DL_DST.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_DL_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetDlDst(HexEncode
- .bytesFromHexString(sstr.group(1))));
+ actionList.add(new SetDlDst(HexEncode.bytesFromHexString(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NW_SRC.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetNwSrc(NetUtils.parseInetAddress(sstr
- .group(1))));
+ actionList.add(new SetNwSrc(NetUtils.parseInetAddress(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NW_DST.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetNwDst(NetUtils.parseInetAddress(sstr
- .group(1))));
+ actionList.add(new SetNwDst(NetUtils.parseInetAddress(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NW_TOS.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NW_TOS.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
actionList.add(new SetNwTos(Byte.parseByte(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_TP_SRC.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_TP_SRC.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList
- .add(new SetTpSrc(Integer.valueOf(sstr.group(1))));
+ actionList.add(new SetTpSrc(Integer.valueOf(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_TP_DST.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_TP_DST.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList
- .add(new SetTpDst(Integer.valueOf(sstr.group(1))));
+ actionList.add(new SetTpDst(Integer.valueOf(sstr.group(1))));
continue;
}
- sstr = Pattern.compile(
- ActionType.SET_NEXT_HOP.toString() + "=(.*)").matcher(
- actiongrp);
+ sstr = Pattern.compile(ActionType.SET_NEXT_HOP.toString() + "=(.*)").matcher(actiongrp);
if (sstr.matches()) {
- actionList.add(new SetNextHop(NetUtils.parseInetAddress(sstr
- .group(1))));
+ actionList.add(new SetNextHop(NetUtils.parseInetAddress(sstr.group(1))));
continue;
}
}
* instance the flows constituting a policy all share the same group name.
*/
public class FlowEntry implements Cloneable, Serializable {
- protected static final Logger logger = LoggerFactory
- .getLogger(FlowEntry.class);
+ protected static final Logger logger = LoggerFactory.getLogger(FlowEntry.class);
private static final long serialVersionUID = 1L;
private static final Logger log = LoggerFactory.getLogger(FlowEntry.class);
private String groupName; // group name
return cloned;
}
+ /*
+ * Only accounts for the fields that uniquely identify a flow for
+ * collision purposes: node, match and priority
+ */
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result + ((flow == null) ? 0 : flow.hashCode());
- result = prime * result
- + ((flowName == null) ? 0 : flowName.hashCode());
- result = prime * result
- + ((groupName == null) ? 0 : groupName.hashCode());
result = prime * result + ((node == null) ? 0 : node.hashCode());
+ result = prime * result + ((flow == null) ? 0 : (int) flow.getPriority());
+ result = prime * result + ((flow == null || flow.getMatch() == null) ? 0 : flow.getMatch().hashCode());
+
return result;
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
FlowEntry other = (FlowEntry) obj;
- if (flow == null) {
- if (other.flow != null)
+
+ if (node == null) {
+ if (other.node != null) {
return false;
- } else if (!flow.equals(other.flow))
+ }
+ } else if (!node.equals(other.node)) {
return false;
- if (flowName == null) {
- if (other.flowName != null)
- return false;
- } else if (!flowName.equals(other.flowName))
+ }
+
+ if (flow == null) {
+ return (other.flow == null) ? true : false;
+ } else if (other.flow == null) {
return false;
- if (groupName == null) {
- if (other.groupName != null)
- return false;
- } else if (!groupName.equals(other.groupName))
+ }
+ if (flow.getPriority() != other.flow.getPriority()) {
return false;
- if (node == null) {
- if (other.node != null)
+ }
+ if (flow.getMatch() == null) {
+ if (other.flow.getMatch() != null) {
return false;
- } else if (!node.equals(other.node))
+ }
+ } else if (!flow.getMatch().equals(other.flow.getMatch())) {
return false;
+ }
+
return true;
}
@Override
public String toString() {
- return "FlowEntry[flowName = " + flowName + ", groupName = "
- + groupName + ",node = " + node + ", flow = " + flow + "]";
+ return "FlowEntry[flowName = " + flowName + ", groupName = " + groupName + ", node = " + node + ", flow = "
+ + flow + "]";
}
private String constructFlowName() {
return this;
}
+
+ /**
+ * Returns whether this entry is the result of an internally generated
+ * static flow
+ *
+ * @return true if this is an internally generated static flow, false otherwise
+ */
+ public boolean isInternal() {
+ return flowName.startsWith(FlowConfig.internalStaticFlowBegin);
+ }
}
package org.opendaylight.controller.forwardingrulesmanager;
+import java.io.Serializable;
+
import org.opendaylight.controller.sal.core.ContainerFlow;
import org.opendaylight.controller.sal.core.Node;
*
* Note: If the container flow is null, the install entry will be a clone of the
* original entry
- *
*/
-public class FlowEntryInstall {
- private FlowEntry original;
- private ContainerFlow cFlow;
- private FlowEntry install;
+public class FlowEntryInstall implements Serializable {
+ private static final long serialVersionUID = 1L;
+ private final FlowEntry original;
+ private final ContainerFlow cFlow;
+ private final FlowEntry install;
transient private long requestId; // async request id
transient private boolean deletePending;
public FlowEntryInstall(FlowEntry original, ContainerFlow cFlow) {
this.original = original;
this.cFlow = cFlow;
- this.install = (cFlow == null) ? original.clone() : original
- .mergeWith(cFlow);
+ this.install = (cFlow == null) ? original.clone() : original.mergeWith(cFlow);
deletePending = false;
requestId = 0;
}
+ /*
+ * FlowEntryInstall is used as the key in the FRM map that holds the
+ * software view of installed entries, so its hash code is tied to that of
+ * the installed FlowEntry, which accounts for the fields that uniquely
+ * identify a flow from the switch's point of view: node, match and
+ * priority.
+ */
@Override
public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((cFlow == null) ? 0 : cFlow.hashCode());
- result = prime * result + ((install == null) ? 0 : install.hashCode());
- result = prime * result
- + ((original == null) ? 0 : original.hashCode());
- return result;
+ return install.hashCode();
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
FlowEntryInstall other = (FlowEntryInstall) obj;
- if (cFlow == null) {
- if (other.cFlow != null)
- return false;
- } else if (!cFlow.equals(other.cFlow))
- return false;
if (install == null) {
- if (other.install != null)
+ if (other.install != null) {
return false;
- } else if (!install.equals(other.install))
- return false;
- if (original == null) {
- if (other.original != null)
- return false;
- } else if (!original.equals(other.original))
+ }
+ } else if (!install.equals(other.install)) {
return false;
+ }
return true;
}
return requestId;
}
+ /**
+ * Returns whether this entry is the result of an internally generated
+ * static flow
+ *
+ * @return true if this is an internally generated static flow, false otherwise
+ */
+ public boolean isInternal() {
+ return original.isInternal();
+ }
+
@Override
public String toString() {
- return "[Install = " + install + " Original = " + original + " cFlow = "
- + cFlow + " rid = " + requestId + "]";
+ return "[Install = " + install + " Original = " + original + " cFlow = " + cFlow + " rid = " + requestId + "]";
}
}
/**
* Interface that describes methods for installing or removing forwarding rules
* and to access to the flows database.
- *
*/
public interface IForwardingRulesManager {
*/
public Status uninstallFlowEntry(FlowEntry flow);
+ /**
+ * It requests FRM to remove all the Flow Entries that are part of the
+ * specified group. FRM will request the SDN protocol plugin to uninstall
+ * the flows from the network node one by one. Based on the result of this
+ * operation FRM will update its database accordingly and will return the
+ * proper {@code Status} code.
+ *
+ * @param groupName
+ * the group name
+ * @return the {@code Status} object indicating the result of this action
+ */
+ public Status uninstallFlowEntryGroup(String groupName);
+
/**
* It requests FRM to replace the currently installed Flow Entry with the
* new one. It is up to the SDN protocol plugin to decide how to convey this
* not valid an error code is returned. If the existing flow is equal to the
* passed one it will be a no op and success code is returned.
*
- *
* @param newone
* the new flow entry to install
* @return the {@code Status} object indicating the result of this action
* call. A unique request id is returned to the caller. FRM will request the
* SDN protocol plugin to uninstall the flow from the network node. As
* immediate result of this asynchronous call, FRM will update its flow
- * database as if the flow was successfully installed.
+ * database as if the flow was successfully removed.
*
* @param flow
* the flow entry to uninstall
*/
public Status uninstallFlowEntryAsync(FlowEntry flow);
+ /**
+ * It requests FRM to remove all the Flow Entries that are part of the
+ * specified group through an asynchronous call. FRM will request the SDN
+ * protocol plugin to uninstall the flows from the network node one by one.
+ * As immediate result of this asynchronous call, FRM will update its flow
+ * database as if the flow was successfully removed.
+ *
+ * @param groupName
+ * the group name
+ * @return the {@code Status} object indicating the result of this action
+ */
+ public Status uninstallFlowEntryGroupAsync(String groupName);
+
/**
* It requests FRM to replace the currently installed Flow Entry with the
* new one through an asynchronous call. A unique request id is returned to
* @param dstPort
* the list of ports to be added to the flow output actions
*/
- public void addOutputPort(Node node, String flowName,
- List<NodeConnector> dstPort);
+ public void addOutputPort(Node node, String flowName, List<NodeConnector> dstPort);
/**
* Remove a list of output port from the flow with the specified name on the
* @param dstPort
* the list of ports to be removed from the flow output actions
*/
- public void removeOutputPort(Node node, String flowName,
- List<NodeConnector> dstPort);
+ public void removeOutputPort(Node node, String flowName, List<NodeConnector> dstPort);
/**
* Replace the current output port in the specified flow with the specified
* @param dstPort
* the new output action port
*/
- public void replaceOutputPort(Node node, String flowName,
- NodeConnector outPort);
+ public void replaceOutputPort(Node node, String flowName, NodeConnector outPort);
/**
* Returns the output port configured on the specified flow
/**
* The interface which describes the methods forwarding rules manager will call
* for notifying the listeners of policy installation updates.
- *
*/
public interface IForwardingRulesManagerAware {
*
* PortGroup is used by PortGroupProvider application to signal a set of ports
* that represent a configured PortGroupConfig.
- *
- *
*/
public class PortGroup {
private long matrixSwitchId;
@Override
public String toString() {
- return "PortGroup [matrixSwitchId=" + matrixSwitchId + ", ports="
- + ports + "]";
+ return "PortGroup [matrixSwitchId=" + matrixSwitchId + ", ports=" + ports + "]";
}
}
* true indicates that the PortGroup is added. False indicates
* that the PortGroup is removed.
*/
- void portGroupChanged(PortGroupConfig config,
- Map<Node, PortGroup> portGroupData, boolean add);
+ void portGroupChanged(PortGroupConfig config, Map<Node, PortGroup> portGroupData, boolean add);
}
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result
- + ((matchString == null) ? 0 : matchString.hashCode());
+ result = prime * result + ((matchString == null) ? 0 : matchString.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public String toString() {
- return "PortGroupConfig [name=" + name + ", matchString=" + matchString
- + "]";
+ return "PortGroupConfig [name=" + name + ", matchString=" + matchString + "]";
}
}
* @return PortGroup data for a given Openflow switch.
* @see PortGroup
*/
- public PortGroup getPortGroupData(PortGroupConfig config,
- long matrixSwitchId);
+ public PortGroup getPortGroupData(PortGroupConfig config, long matrixSwitchId);
/**
* Registers a Listener for Port Group membership changes based on Custom
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.sal.action.SetDlDst;
import org.opendaylight.controller.sal.action.SetNwDst;
import org.opendaylight.controller.sal.action.SetVlanId;
+import org.opendaylight.controller.sal.core.ContainerFlow;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
import org.opendaylight.controller.sal.utils.IPProtocols;
import org.opendaylight.controller.sal.utils.NodeConnectorCreator;
import org.opendaylight.controller.sal.utils.NodeCreator;
+import org.opendaylight.controller.sal.utils.Status;
public class frmTest {
@Test
public void testFlowEntryInstall() throws UnknownHostException {
Node node = NodeCreator.createOFNode(1L);
- FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
- FlowEntry pol2 = new FlowEntry("polTest2", null, getSampleFlowV6(node),
- node);
- FlowEntryInstall fei = new FlowEntryInstall(pol, null);
- FlowEntryInstall fei2 = new FlowEntryInstall(pol, null);
- FlowEntryInstall fei3 = new FlowEntryInstall(pol2, null);
+ FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
+ FlowEntry pol2 = new FlowEntry("polTest2", null, getSampleFlowV6(node), node);
+ FlowEntryInstall fei = new FlowEntryInstall(pol.clone(), null);
+ FlowEntryInstall fei2 = new FlowEntryInstall(pol.clone(), null);
+ FlowEntryInstall fei3 = new FlowEntryInstall(pol2.clone(), null);
Assert.assertTrue(fei.getOriginal().equals(pol));
Assert.assertTrue(fei.getInstall().equals(pol));
Assert.assertTrue(fei.getFlowName().equals(pol.getFlowName()));
fei.toBeDeleted();
Assert.assertTrue(fei.isDeletePending());
Assert.assertNull(fei.getContainerFlow());
- Assert.assertTrue(fei.equalsByNodeAndName(pol.getNode(),
- pol.getFlowName()));
+ Assert.assertTrue(fei.equalsByNodeAndName(pol.getNode(), pol.getFlowName()));
Assert.assertTrue(fei.equals(fei2));
- fei2.getOriginal().setFlowName("polTest2");
Assert.assertFalse(fei.equals(null));
- Assert.assertFalse(fei.equals(fei3));
+ Assert.assertTrue(fei.equals(fei3));
}
@Test
public void testFlowEntryCreation() throws UnknownHostException {
Node node = NodeCreator.createOFNode(1L);
- FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
+ FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
Assert.assertTrue(pol.getFlow().equals(getSampleFlowV6(node)));
}
public void testFlowEntrySetGet() throws UnknownHostException {
Node node = NodeCreator.createOFNode(1L);
Node node2 = NodeCreator.createOFNode(2L);
- FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
+ FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
pol.setGroupName("polTest2");
pol.setFlowName("flowName");
Assert.assertTrue(pol.getFlowName().equals("flowName"));
public void testFlowEntryEquality() throws UnknownHostException {
Node node = NodeCreator.createOFNode(1L);
Node node2 = NodeCreator.createOFNode(1L);
- FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
- FlowEntry pol2 = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node2);
+ FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
+ FlowEntry pol2 = new FlowEntry("polTest", null, getSampleFlowV6(node), node2);
Assert.assertTrue(pol.equals(pol2));
}
+ @Test
+ public void testFlowEntryCollision() throws UnknownHostException {
+ // Create 2 equal FlowEntry objects
+ Node node1 = NodeCreator.createOFNode(1L);
+ Node node2 = NodeCreator.createOFNode(1L);
+ FlowEntry fe1 = new FlowEntry("Junit", "flow1", getSampleFlowV6(node1), node1);
+ FlowEntry fe2 = new FlowEntry("Junit", "flow2", getSampleFlowV6(node2), node1);
+
+ // Check equality in FlowEntry and parameters
+ Assert.assertTrue(fe1.getFlow().getMatch().equals(fe2.getFlow().getMatch()));
+ Assert.assertTrue(fe1.getFlow().getMatch().getMatches() == fe2.getFlow().getMatch().getMatches());
+ Assert.assertTrue(fe1.getFlow().getMatch().hashCode() == fe2.getFlow().getMatch().hashCode());
+ Assert.assertTrue(fe1.getFlow().hashCode() == fe2.getFlow().hashCode());
+ Assert.assertTrue(fe1.equals(fe2));
+ Assert.assertTrue(fe1.hashCode() == fe2.hashCode());
+
+ // Change priority field for fe2, verify inequality
+ fe2.getFlow().setPriority((short)1000);
+
+ // Verify FlowEntry works as key in collection
+ ConcurrentMap<FlowEntry, FlowEntry> map = new ConcurrentHashMap<FlowEntry, FlowEntry>();
+ Assert.assertTrue(null == map.put(fe1, fe1));
+ Assert.assertTrue(fe1.clone().equals(map.put(fe1.clone(), fe1.clone())));
+ Assert.assertTrue(map.get(fe1.clone()).equals(fe1.clone()));
+ Assert.assertTrue(map.keySet().contains(fe1.clone()));
+ Assert.assertTrue(map.containsKey(fe1));
+
+ // Remove key
+ map.remove(fe1);
+ Assert.assertTrue(map.isEmpty());
+ Assert.assertFalse(map.containsKey(fe1));
+
+ // Verify cloned object as key
+ map.put(fe1.clone(), fe1.clone());
+ Assert.assertTrue(map.containsKey(fe1));
+
+ // Verify different key is not present
+ Assert.assertFalse(map.containsKey(fe2));
+
+ // Add different key
+ map.put(fe2.clone(), fe2.clone());
+ Assert.assertTrue(map.size() == 2);
+ Assert.assertTrue(map.containsKey(fe1));
+ Assert.assertTrue(map.containsKey(fe2));
+
+ // Make fe2 equal to fe1 again
+ fe2.getFlow().setPriority((short)300);
+ Assert.assertTrue(fe2.equals(fe1));
+ Assert.assertTrue(map.containsKey(fe2));
+
+ // Clean up
+ map.clear();
+ }
+
+ @Test
+ public void testFlowEntryInstallCollision() throws UnknownHostException {
+ // Create 2 equal FlowEntryInstall objects
+ Node node1 = NodeCreator.createOFNode(1L);
+ Node node2 = NodeCreator.createOFNode(1L);
+ FlowEntry fe1 = new FlowEntry("Junit", "flow1", getSampleFlowV6(node1), node1);
+ FlowEntry fe2 = new FlowEntry("Junit", "flow2", getSampleFlowV6(node2), node1);
+ ContainerFlow cf1 = null;
+ ContainerFlow cf2 = null;
+ FlowEntryInstall fei1 = new FlowEntryInstall(fe1, cf1);
+ FlowEntryInstall fei2 = new FlowEntryInstall(fe2, cf2);
+
+ // Check equality in FlowEntry and parameters
+ Assert.assertTrue(fei1.equals(fei2));
+ Assert.assertTrue(fei1.hashCode() == fei2.hashCode());
+
+ // Verify FlowEntryInstall works as key in collection
+ ConcurrentMap<FlowEntryInstall, FlowEntryInstall> map =
+ new ConcurrentHashMap<FlowEntryInstall, FlowEntryInstall>();
+ Assert.assertTrue(null == map.put(fei1, fei1));
+ Assert.assertTrue(map.get(fei1).equals(fei2));
+ Assert.assertTrue(map.keySet().contains(fei1));
+ Assert.assertTrue(map.keySet().contains(fei2));
+ Assert.assertTrue(map.containsKey(fei1));
+
+ // Remove key
+ map.remove(fei1);
+ Assert.assertTrue(map.isEmpty());
+ Assert.assertFalse(map.containsKey(fei1));
+
+ // Verify cloned object as key
+ map.put(fei1, fei1);
+ Assert.assertTrue(map.containsKey(fei1));
+
+ // Change fei2, change relevant hashcode info
+ fei2.getInstall().getFlow().setPriority((short)301);
+ Assert.assertFalse(fei1.equals(fei2));
+ Assert.assertFalse(fei1.hashCode() == fei2.hashCode());
+
+
+ // Verify different key is not present
+ Assert.assertFalse(map.containsKey(fei2));
+
+ // Add different key
+ map.put(fei2, fei2);
+ Assert.assertTrue(map.size() == 2);
+ Assert.assertTrue(map.containsKey(fei1));
+ Assert.assertTrue(map.containsKey(fei2));
+
+ // Make fei2 equal to fei1 again
+ fei2.getInstall().getFlow().setPriority((short)300);
+ Assert.assertTrue(fei2.equals(fei1));
+ Assert.assertTrue(map.containsKey(fei2));
+
+ // Clean up
+ map.clear();
+ }
+
@Test
public void testFlowEntryCloning() throws UnknownHostException {
Node node = NodeCreator.createOFNode(1L);
- FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
+ FlowEntry pol = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
FlowEntry pol2 = pol.clone();
Assert.assertTrue(pol.equals(pol2));
}
FlowConfig frmC = new FlowConfig();
FlowConfig frmC3 = new FlowConfig();
Node node = NodeCreator.createOFNode(1L);
- FlowEntry entry = new FlowEntry("polTest", null, getSampleFlowV6(node),
- node);
+ FlowEntry entry = new FlowEntry("polTest", null, getSampleFlowV6(node), node);
// testing equal function
Assert.assertFalse(frmC.equals(null));
Assert.assertFalse(frmC.equals(flowC));
frmC.setNode(Node.fromString(Node.NodeIDType.OPENFLOW, "1"));
- Assert.assertTrue(frmC.getNode().equals(
- Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
+ Assert.assertTrue(frmC.getNode().equals(Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
Assert.assertFalse(frmC.equals(frmC3));
frmC3.setNode(Node.fromString(Node.NodeIDType.OPENFLOW, "1"));
FlowConfig fc2 = new FlowConfig();
fc.setName("flow1");
fc.setNode(Node.fromString(Node.NodeIDType.OPENFLOW, "1"));
- Assert.assertFalse(fc.onNode(Node.fromString(Node.NodeIDType.OPENFLOW,
- "0")));
- Assert.assertTrue(fc.onNode(Node.fromString(Node.NodeIDType.OPENFLOW,
- "1")));
-
- Assert.assertTrue(fc.isByNameAndNodeIdEqual("flow1",
- Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
- Assert.assertFalse(fc.isByNameAndNodeIdEqual("flow1",
- Node.fromString(Node.NodeIDType.OPENFLOW, "0")));
- Assert.assertFalse(fc.isByNameAndNodeIdEqual("flow2",
- Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
+ Assert.assertFalse(fc.onNode(Node.fromString(Node.NodeIDType.OPENFLOW, "0")));
+ Assert.assertTrue(fc.onNode(Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
+
+ Assert.assertTrue(fc.isByNameAndNodeIdEqual("flow1", Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
+ Assert.assertFalse(fc.isByNameAndNodeIdEqual("flow1", Node.fromString(Node.NodeIDType.OPENFLOW, "0")));
+ Assert.assertFalse(fc.isByNameAndNodeIdEqual("flow2", Node.fromString(Node.NodeIDType.OPENFLOW, "1")));
Assert.assertFalse(fc.isByNameAndNodeIdEqual(fc2));
fc2.setName("flow1");
@Test
public void testStatusToggle() throws UnknownHostException {
FlowConfig fc = new FlowConfig();
- fc.toggleStatus();
+ fc.toggleInstallation();
Assert.assertTrue(fc.installInHw());
- fc.toggleStatus();
+ fc.toggleInstallation();
Assert.assertFalse(fc.installInHw());
- fc.toggleStatus();
+ fc.toggleInstallation();
Assert.assertTrue(fc.installInHw());
}
@Test
public void testValid() throws UnknownHostException {
- StringBuffer sb = new StringBuffer();
- sb.setLength(0);
FlowConfig fc2 = createSampleFlowConfig();
- Assert.assertTrue(fc2.isValid(null, sb));
+ Assert.assertTrue(fc2.validate(null).isSuccess());
FlowConfig fc = new FlowConfig();
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Name is null"));
+ Status status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Invalid name"));
fc.setName("Config");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Node is null"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Node is null"));
fc.setNode(Node.fromString(Node.NodeIDType.OPENFLOW, "1"));
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setPriority("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "is not in the range 0 - 65535"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 65535"));
fc.setPriority("100000");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "is not in the range 0 - 65535"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 65535"));
+
fc.setPriority("2000");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setCookie("100");
+ Assert.assertTrue(fc.validate(null).isSuccess());
+
fc.setIngressPort("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("is not valid for the Switch"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not valid for the Switch"));
+
fc.setIngressPort("100");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setVlanId(("-1"));
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString()
- .contains("is not in the range 0 - 4095"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 4095"));
+
fc.setVlanId("5000");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString()
- .contains("is not in the range 0 - 4095"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 4095"));
+
fc.setVlanId("100");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
+
fc.setVlanPriority("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("is not in the range 0 - 7"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 7"));
+
fc.setVlanPriority("9");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("is not in the range 0 - 7"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("is not in the range 0 - 7"));
+
fc.setVlanPriority("5");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setEtherType("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Ethernet type"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Ethernet type"));
+
fc.setEtherType("0xfffff");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Ethernet type"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Ethernet type"));
+
fc.setEtherType("0x800");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setTosBits("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("IP ToS bits"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("IP ToS bits"));
+
fc.setTosBits("65");
- sb.setLength(0);
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("IP ToS bits"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("IP ToS bits"));
+
fc.setTosBits("60");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setSrcPort("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Transport source port"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Transport source port"));
+
fc.setSrcPort("0xfffff");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Transport source port"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Transport source port"));
+
fc.setSrcPort("0x00ff");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setDstPort("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Transport destination port"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Transport destination port"));
+
fc.setDstPort("0xfffff");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Transport destination port"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Transport destination port"));
+
fc.setDstPort("0x00ff");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setSrcMac("abc");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Ethernet source address"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Ethernet source address"));
+
fc.setSrcMac("00:A0:C9:14:C8:29");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setDstMac("abc");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString()
- .contains("Ethernet destination address"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Ethernet destination address"));
+
fc.setDstMac("00:A0:C9:22:AB:11");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setSrcIp("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("IP source address"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("IP source address"));
+
fc.setSrcIp("2001:420:281:1004:407a:57f4:4d15:c355");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "Type mismatch between Ethernet & Src IP"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Type mismatch between Ethernet & Src IP"));
fc.setEtherType("0x86dd");
- Assert.assertTrue(fc.isValid(null, sb));
- sb.setLength(0);
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setSrcIp("1.1.1.1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "Type mismatch between Ethernet & Src IP"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Type mismatch between Ethernet & Src IP"));
+
fc.setEtherType("0x800");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setDstIp("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("IP destination address"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("IP destination address"));
+
fc.setDstIp("2001:420:281:1004:407a:57f4:4d15:c355");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "Type mismatch between Ethernet & Dst IP"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Type mismatch between Ethernet & Dst IP"));
fc.setEtherType("0x86dd");
fc.setSrcIp("2001:420:281:1004:407a:57f4:4d15:c355");
- Assert.assertTrue(fc.isValid(null, sb));
- sb.setLength(0);
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setDstIp("2.2.2.2");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains(
- "Type mismatch between Ethernet & Dst IP"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Type mismatch between Ethernet & Dst IP"));
+
fc.setEtherType("0x800");
fc.setSrcIp("1.1.1.1");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setEtherType(null);
fc.setSrcIp("2001:420:281:1004:407a:57f4:4d15:c355");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("IP Src Dest Type mismatch"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("IP Src Dest Type mismatch"));
+
fc.setSrcIp("1.1.1.1");
fc.setIdleTimeout("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Idle Timeout value"));
- sb.setLength(0);
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Idle Timeout value"));
+
fc.setIdleTimeout("0xfffff");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Idle Timeout value"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Idle Timeout value"));
+
fc.setIdleTimeout("10");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
fc.setHardTimeout("-1");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Hard Timeout value"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Hard Timeout value"));
+
fc.setHardTimeout("0xfffff");
- Assert.assertFalse(fc.isValid(null, sb));
- Assert.assertTrue(sb.toString().contains("Hard Timeout value"));
+ status = fc.validate(null);
+ Assert.assertFalse(status.isSuccess());
+ Assert.assertTrue(status.getDescription().contains("Hard Timeout value"));
+
fc.setHardTimeout("10");
- Assert.assertTrue(fc.isValid(null, sb));
+ Assert.assertTrue(fc.validate(null).isSuccess());
}
ArrayList<String> actions;
actions = createSampleActionList();
// actions.add(ActionType.CONTROLLER.toString());
- FlowConfig flowConfig = new FlowConfig("true", "Config1",
- Node.fromString(Node.NodeIDType.OPENFLOW, "1"), "100", "0",
- "60", "2", "100", "0", "0x0800", "00:A0:C9:14:C8:29",
- "00:A0:C9:22:AB:11", IPProtocols.TCP.toString(), "0",
- "1.2.3.4", "2.2.2.2", "8080", "100", "300", "1000", actions);
+ FlowConfig flowConfig = new FlowConfig("true", "Config1", Node.fromString(Node.NodeIDType.OPENFLOW, "1"),
+ "100", "0", "60", "2", "100", "0", "0x0800", "00:A0:C9:14:C8:29", "00:A0:C9:22:AB:11",
+ IPProtocols.TCP.toString(), "0", "1.2.3.4", "2.2.2.2", "8080", "100", "300", "1000", actions);
return flowConfig;
}
}
private Flow getSampleFlowV6(Node node) throws UnknownHostException {
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- NodeConnector oport = NodeConnectorCreator.createOFNodeConnector(
- (short) 30, node);
- byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
- byte newMac[] = { (byte) 0x11, (byte) 0xaa, (byte) 0xbb, (byte) 0x34,
- (byte) 0x9a, (byte) 0xee };
- InetAddress srcIP = InetAddress
- .getByName("2001:420:281:1004:407a:57f4:4d15:c355");
- InetAddress dstIP = InetAddress
- .getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMask = InetAddress
- .getByName("ffff:ffff:ffff:ffff:0:0:0:0");
- InetAddress ipMask2 = InetAddress
- .getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ NodeConnector oport = NodeConnectorCreator.createOFNodeConnector((short) 30, node);
+ byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ byte newMac[] = { (byte) 0x11, (byte) 0xaa, (byte) 0xbb, (byte) 0x34, (byte) 0x9a, (byte) 0xee };
+ InetAddress srcIP = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ InetAddress dstIP = InetAddress.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
+ InetAddress ipMask = InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ InetAddress ipMask2 = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
InetAddress newIP = InetAddress.getByName("2056:650::a1b0");
short ethertype = EtherTypes.IPv6.shortValue();
short vlan = (short) 27;
actions.add(new PopVlan());
actions.add(new Flood());
- actions.add(new Controller());
-
Flow flow = new Flow(match, actions);
flow.setPriority((short) 300);
flow.setHardTimeout((short) 240);
import org.opendaylight.controller.hosttracker.IfIptoHost;
public class Activator extends ComponentActivatorAbstractBase {
- protected static final Logger logger = LoggerFactory
- .getLogger(Activator.class);
+ protected static final Logger logger = LoggerFactory.getLogger(Activator.class);
/**
* Function called when the activator starts just after some initializations
// export the service
if (containerName.equals(GlobalConstants.DEFAULT.toString())) {
- interfaces = new String[] { IContainerListener.class.getName(),
- ISwitchManagerAware.class.getName(),
- IForwardingRulesManager.class.getName(),
- IInventoryListener.class.getName(),
- ICacheUpdateAware.class.getName(),
- IConfigurationContainerAware.class.getName(),
+ interfaces = new String[] { IContainerListener.class.getName(), ISwitchManagerAware.class.getName(),
+ IForwardingRulesManager.class.getName(), IInventoryListener.class.getName(),
+ ICacheUpdateAware.class.getName(), IConfigurationContainerAware.class.getName(),
IFlowProgrammerListener.class.getName() };
} else {
- interfaces = new String[] {
- ISwitchManagerAware.class.getName(),
- IForwardingRulesManager.class.getName(),
- IInventoryListener.class.getName(),
- ICacheUpdateAware.class.getName(),
- IConfigurationContainerAware.class.getName(),
+ interfaces = new String[] { ISwitchManagerAware.class.getName(),
+ IForwardingRulesManager.class.getName(), IInventoryListener.class.getName(),
+ ICacheUpdateAware.class.getName(), IConfigurationContainerAware.class.getName(),
IFlowProgrammerListener.class.getName() };
}
c.setInterface(interfaces, props);
- c.add(createContainerServiceDependency(containerName)
- .setService(IFlowProgrammerService.class)
- .setCallbacks("setFlowProgrammerService",
- "unsetFlowProgrammerService").setRequired(true));
+ c.add(createContainerServiceDependency(containerName).setService(IFlowProgrammerService.class)
+ .setCallbacks("setFlowProgrammerService", "unsetFlowProgrammerService").setRequired(true));
- c.add(createContainerServiceDependency(containerName)
- .setService(IClusterContainerServices.class)
- .setCallbacks("setClusterContainerService",
- "unsetClusterContainerService").setRequired(true));
- c.add(createContainerServiceDependency(containerName)
- .setService(ISwitchManager.class)
- .setCallbacks("setSwitchManager", "unsetSwitchManager")
- .setRequired(true));
- c.add(createContainerServiceDependency(containerName)
- .setService(IForwardingRulesManagerAware.class)
- .setCallbacks("setFrmAware", "unsetFrmAware")
- .setRequired(false));
- c.add(createContainerServiceDependency(containerName)
- .setService(IfIptoHost.class)
- .setCallbacks("setHostFinder", "unsetHostFinder")
- .setRequired(true));
- c.add(createContainerServiceDependency(containerName)
- .setService(IContainer.class)
- .setCallbacks("setIContainer", "unsetIContainer")
- .setRequired(true));
+ c.add(createContainerServiceDependency(containerName).setService(IClusterContainerServices.class)
+ .setCallbacks("setClusterContainerService", "unsetClusterContainerService").setRequired(true));
+ c.add(createContainerServiceDependency(containerName).setService(ISwitchManager.class)
+ .setCallbacks("setSwitchManager", "unsetSwitchManager").setRequired(true));
+ c.add(createContainerServiceDependency(containerName).setService(IForwardingRulesManagerAware.class)
+ .setCallbacks("setFrmAware", "unsetFrmAware").setRequired(false));
+ c.add(createContainerServiceDependency(containerName).setService(IfIptoHost.class)
+ .setCallbacks("setHostFinder", "unsetHostFinder").setRequired(true));
+ c.add(createContainerServiceDependency(containerName).setService(IContainer.class)
+ .setCallbacks("setIContainer", "unsetIContainer").setRequired(true));
}
}
}
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.LinkedBlockingQueue;
import org.eclipse.osgi.framework.console.CommandInterpreter;
import org.eclipse.osgi.framework.console.CommandProvider;
* the network. It also maintains the central repository of all the forwarding
* rules installed on the network nodes.
*/
-public class ForwardingRulesManagerImpl implements IForwardingRulesManager,
- PortGroupChangeListener, IContainerListener, ISwitchManagerAware,
- IConfigurationContainerAware, IInventoryListener, IObjectReader,
- ICacheUpdateAware<Long, String>, CommandProvider,
- IFlowProgrammerListener {
+public class ForwardingRulesManagerImpl implements IForwardingRulesManager, PortGroupChangeListener,
+ IContainerListener, ISwitchManagerAware, IConfigurationContainerAware, IInventoryListener, IObjectReader,
+ ICacheUpdateAware<Long, String>, CommandProvider, IFlowProgrammerListener {
private static final String SAVE = "Save";
private static final String NODEDOWN = "Node is Down";
- private static final Logger log = LoggerFactory
- .getLogger(ForwardingRulesManagerImpl.class);
+ private static final String SUCCESS = StatusCode.SUCCESS.toString();
+ private static final Logger log = LoggerFactory.getLogger(ForwardingRulesManagerImpl.class);
private Map<Long, String> flowsSaveEvent;
private String frmFileName;
private String portGroupFileName;
private ConcurrentMap<String, PortGroupConfig> portGroupConfigs;
private ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>> portGroupData;
private ConcurrentMap<String, Object> TSPolicies;
- private boolean inContainerMode; // being used by default instance only
+ private boolean inContainerMode; // being used by global instance only
+ private boolean stopping;
+
/*
- * Flow database. It's the software view of what was installed on the
- * switch. It is indexed by node. For convenience a version indexed by group
- * name is also maintained. The core element is a class which contains the
- * flow entry pushed by the functional modules and the respective container
- * flow merged version. In absence of container flows, the two flow entries
- * are the same.
+ * Flow database. It's the software view of what was requested to install
+ * and what is installed on the switch. It is indexed by the entry itself.
+ * The entry's hashcode summarizes the network node index, the flow's
+ * priority and the flow's match. The value element is a class which
+ * contains the flow entry pushed by the application modules and the
+ * respective container flow merged version. In absence of container flows,
+ * the two flow entries are the same.
*/
- private ConcurrentMap<Node, Set<FlowEntryInstall>> nodeFlows;
- private ConcurrentMap<String, Set<FlowEntryInstall>> groupFlows;
+ private ConcurrentMap<FlowEntry, FlowEntry> originalSwView;
+ private ConcurrentMap<FlowEntryInstall, FlowEntryInstall> installedSwView;
+ /*
+ * Per node and per group indexing
+ */
+ private ConcurrentMap<Node, List<FlowEntryInstall>> nodeFlows;
+ private ConcurrentMap<String, List<FlowEntryInstall>> groupFlows;
+
/*
* Inactive flow list. This is for the global instance of FRM It will
* contain all the flow entries which were installed on the global container
private IFlowProgrammerService programmer;
private IClusterContainerServices clusterContainerService = null;
private ISwitchManager switchManager;
+ private Thread frmEventHandler;
+ protected BlockingQueue<FRMEvent> pendingEvents;
/**
* Adds a flow entry onto the network node It runs various validity checks
* the original flow entry application requested to add
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In the case of an asynchronous
+ *         call, it will contain the unique id assigned to this request
*/
private Status addEntry(FlowEntry flowEntry, boolean async) {
* Derive the container flow merged entries to install In presence of N
* container flows, we may end up with N different entries to install...
*/
- List<FlowEntryInstall> toInstallList = deriveInstallEntries(
- flowEntry.clone(), container.getContainerFlows());
+ List<FlowEntryInstall> toInstallList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());
// Container Flow conflict Check
if (toInstallList.isEmpty()) {
for (FlowEntryInstall entry : toInstallList) {
// Conflict Check: Verify new entry would not overwrite existing
// ones
- if (findMatch(entry.getInstall(), false) != null) {
- log.warn("Operation Rejected: A flow with same match "
- + "and priority exists on the target node");
+ if (this.installedSwView.containsKey(entry)) {
+ log.warn("Operation Rejected: A flow with same match and priority exists on the target node");
log.trace("Aborting to install {}", entry);
continue;
}
// Declare failure if all the container flow merged entries clash with
// existing entries
if (toInstallSafe.size() == 0) {
- String msg = "A flow with same match and priority exists "
- + "on the target node";
+ String msg = "A flow with same match and priority exists on the target node";
String logMsg = msg + ": {}";
log.warn(logMsg, flowEntry);
return new Status(StatusCode.CONFLICT, msg);
if (ret.isSuccess()) {
oneSucceded = true;
/*
- * The first successful status response will be returned
- * For the asynchronous call, we can discard the container flow
- * complication for now and assume we will always deal with
- * one flow only per request
+ * The first successful status response will be returned. For the
+ * asynchronous call, we can discard the container flow
+ * complication for now and assume we will always deal with one
+ * flow only per request
*/
succeded = ret;
} else {
error = ret;
- log.warn("Failed to install the entry: {}. The failure is: {}",
- installEntry, ret.getDescription());
+ log.warn("Failed to install the entry: {}. The failure is: {}", installEntry, ret.getDescription());
}
}
* @return the list of container flow merged entries good to be installed on
* this container
*/
- private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request,
- List<ContainerFlow> cFlowList) {
- List<FlowEntryInstall> toInstallList = new ArrayList<FlowEntryInstall>(
- 1);
+ private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request, List<ContainerFlow> cFlowList) {
+ List<FlowEntryInstall> toInstallList = new ArrayList<FlowEntryInstall>(1);
- if (container.getContainerFlows() == null
- || container.getContainerFlows().isEmpty()) {
+ if (container.getContainerFlows() == null || container.getContainerFlows().isEmpty()) {
// No container flows => entry good to be installed unchanged
toInstallList.add(new FlowEntryInstall(request.clone(), null));
} else {
// created
for (ContainerFlow cFlow : container.getContainerFlows()) {
if (cFlow.allowsFlow(request.getFlow())) {
- toInstallList.add(new FlowEntryInstall(request.clone(),
- cFlow));
+ toInstallList.add(new FlowEntryInstall(request.clone(), cFlow));
}
}
}
* @param newFlowEntry
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In case of asynchronous call, it will
+ * contain the unique id assigned to this request
*/
- private Status modifyEntry(FlowEntry currentFlowEntry,
- FlowEntry newFlowEntry, boolean async) {
-
+ private Status modifyEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry, boolean async) {
Status retExt;
// Sanity checks
- if (currentFlowEntry == null || currentFlowEntry.getNode() == null
- || newFlowEntry == null || newFlowEntry.getNode() == null) {
+ if (currentFlowEntry == null || currentFlowEntry.getNode() == null || newFlowEntry == null
+ || newFlowEntry.getNode() == null) {
String msg = "Modify: Invalid FlowEntry";
String logMsg = msg + ": {} or {}";
log.warn(logMsg, currentFlowEntry, newFlowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (!currentFlowEntry.getNode().equals(newFlowEntry.getNode())
- || !currentFlowEntry.getFlowName().equals(
- newFlowEntry.getFlowName())) {
+ || !currentFlowEntry.getFlowName().equals(newFlowEntry.getFlowName())) {
String msg = "Modify: Incompatible Flow Entries";
String logMsg = msg + ": {} and {}";
log.warn(logMsg, currentFlowEntry, newFlowEntry);
}
// Equality Check
- if (currentFlowEntry.equals(newFlowEntry)) {
+ if (currentFlowEntry.getFlow().equals(newFlowEntry.getFlow())) {
String msg = "Modify skipped as flows are the same";
String logMsg = msg + ": {} and {}";
log.debug(logMsg, currentFlowEntry, newFlowEntry);
return new Status(StatusCode.SUCCESS, msg);
}
- // Conflict Check: Verify the new entry would not conflict with another
- // existing one
- // This is a loose check on the previous original flow entry requests.
- // No check
- // on the container flow merged flow entries (if any) yet
- FlowEntryInstall sameMatchOriginalEntry = findMatch(newFlowEntry, true);
- if (sameMatchOriginalEntry != null
- && !sameMatchOriginalEntry.getOriginal().equals(
- currentFlowEntry)) {
- String msg = "Operation Rejected: Another flow with same match "
- + "and priority exists on the target node";
+ /*
+ * Conflict Check: Verify the new entry would not conflict with an
+ * existing one. This is a loose check on the previous original flow
+ * entry requests. No check on the container flow merged flow entries
+ * (if any) yet
+ */
+ FlowEntry sameMatchOriginalEntry = originalSwView.get(newFlowEntry);
+ if (sameMatchOriginalEntry != null && !sameMatchOriginalEntry.equals(currentFlowEntry)) {
+ String msg = "Operation Rejected: Another flow with same match and priority exists on the target node";
String logMsg = msg + ": {}";
log.warn(logMsg, currentFlowEntry);
return new Status(StatusCode.CONFLICT, msg);
}
// Derive the installed and toInstall entries
- List<FlowEntryInstall> installedList = deriveInstallEntries(
- currentFlowEntry.clone(), container.getContainerFlows());
- List<FlowEntryInstall> toInstallList = deriveInstallEntries(
- newFlowEntry.clone(), container.getContainerFlows());
+ List<FlowEntryInstall> installedList = deriveInstallEntries(currentFlowEntry.clone(),
+ container.getContainerFlows());
+ List<FlowEntryInstall> toInstallList = deriveInstallEntries(newFlowEntry.clone(), container.getContainerFlows());
if (toInstallList.isEmpty()) {
- String msg = "Modify Operation Rejected: The new entry "
- + "conflicts with all the container flows";
+ String msg = "Modify Operation Rejected: The new entry conflicts with all the container flows";
String logMsg = msg + ": {}";
log.warn(logMsg, newFlowEntry);
log.warn(msg);
}
List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
for (FlowEntryInstall installEntry : toInstallList) {
- // Conflict Check: Verify the new entry would not overwrite another
- // existing one
- FlowEntryInstall sameMatchEntry = findMatch(
- installEntry.getInstall(), false);
- if (sameMatchEntry != null
- && !sameMatchEntry.getOriginal().equals(currentFlowEntry)) {
- log.info("Modify: new container flow merged flow entry "
- + "clashes with existing flow");
+ /*
+ * Conflict Check: Verify the new entry would not overwrite another
+ * existing one
+ */
+ FlowEntryInstall sameMatchEntry = installedSwView.get(installEntry);
+ if (sameMatchEntry != null && !sameMatchEntry.getOriginal().equals(currentFlowEntry)) {
+ log.info("Modify: new container flow merged flow entry clashes with existing flow");
decouple = true;
} else {
toInstallSafe.add(installEntry);
int size = toInstallList.size();
while (i < size) {
// Modify and update database
- retModify = modifyEntryInternal(installedList.get(i),
- toInstallList.get(i), async);
+ retModify = modifyEntryInternal(installedList.get(i), toInstallList.get(i), async);
if (retModify.isSuccess()) {
i++;
} else {
}
// Check if uncompleted modify
if (i < size) {
- log.warn("Unable to perform a complete modify for all "
- + "the container flows merged entries");
+ log.warn("Unable to perform a complete modify for all the container flows merged entries");
// Restore original entries
int j = 0;
while (j < i) {
log.info("Attempting to restore initial entries");
- retExt = modifyEntryInternal(toInstallList.get(i),
- installedList.get(i), async);
+ retExt = modifyEntryInternal(toInstallList.get(i), installedList.get(i), async);
if (retExt.isSuccess()) {
j++;
} else {
succeeded = retModify;
}
/*
- * The first successful status response will be returned.
- * For the asynchronous call, we can discard the container flow
- * complication for now and assume we will always deal with
- * one flow only per request
+ * The first successful status response will be returned. For the
+ * asynchronous call, we can discard the container flow complication for
+ * now and assume we will always deal with one flow only per request
*/
return succeeded;
}
* @param newEntries
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In case of asynchronous call, it will
+ * contain the unique id assigned to this request
*/
- private Status modifyEntryInternal(FlowEntryInstall currentEntries,
- FlowEntryInstall newEntries, boolean async) {
+ private Status modifyEntryInternal(FlowEntryInstall currentEntries, FlowEntryInstall newEntries, boolean async) {
// Modify the flow on the network node
- Status status = (async)?
- programmer.modifyFlowAsync(currentEntries.getNode(),
- currentEntries.getInstall().getFlow(), newEntries.getInstall()
- .getFlow()) :
- programmer.modifyFlow(currentEntries.getNode(),
- currentEntries.getInstall().getFlow(), newEntries.getInstall()
- .getFlow());
-
+ Status status = (async) ? programmer.modifyFlowAsync(currentEntries.getNode(), currentEntries.getInstall()
+ .getFlow(), newEntries.getInstall().getFlow()) : programmer.modifyFlow(currentEntries.getNode(),
+ currentEntries.getInstall().getFlow(), newEntries.getInstall().getFlow());
if (!status.isSuccess()) {
- log.warn(
- "SDN Plugin failed to program the flow: {}. The failure is: {}",
- newEntries.getInstall(), status.getDescription());
+ log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", newEntries.getInstall(),
+ status.getDescription());
return status;
}
- log.trace("Modified {} => {}", currentEntries.getInstall(),
- newEntries.getInstall());
+ log.trace("Modified {} => {}", currentEntries.getInstall(), newEntries.getInstall());
// Update DB
newEntries.setRequestId(status.getRequestId());
* (entry or node not present), it return successfully
*
* @param flowEntry
- * the flow entry to remove
+ * the flow entry to remove
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In case of asynchronous call, it will
+ * contain the unique id assigned to this request
*/
- private synchronized Status removeEntry(FlowEntry flowEntry, boolean async) {
+ private Status removeEntry(FlowEntry flowEntry, boolean async) {
Status error = new Status(null, null);
// Sanity Check
}
// Derive the container flows merged installed entries
- List<FlowEntryInstall> installedList = deriveInstallEntries(
- flowEntry.clone(), container.getContainerFlows());
+ List<FlowEntryInstall> installedList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());
- Set<FlowEntryInstall> flowsOnNode = nodeFlows.get(flowEntry.getNode());
Status succeeded = null;
boolean atLeastOneRemoved = false;
for (FlowEntryInstall entry : installedList) {
- if (flowsOnNode == null) {
- String msg = "Removal skipped (Node down) for flow entry";
- String logMsg = msg + ": {}";
- log.debug(logMsg, flowEntry);
- return new Status(StatusCode.SUCCESS, msg);
- }
- if (!flowsOnNode.contains(entry)) {
+ if (!installedSwView.containsKey(entry)) {
String logMsg = "Removal skipped (not present in software view) for flow entry: {}";
log.debug(logMsg, flowEntry);
if (installedList.size() == 1) {
if (!ret.isSuccess()) {
error = ret;
- log.warn("Failed to remove the entry: {}. The failure is: {}",
- entry.getInstall(), ret.getDescription());
+ log.warn("Failed to remove the entry: {}. The failure is: {}", entry.getInstall(), ret.getDescription());
if (installedList.size() == 1) {
// If we had only one entry to remove, this is fatal failure
return error;
* the flow entry to remove
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In case of asynchronous call, it will
+ * contain the unique id assigned to this request
*/
private Status removeEntryInternal(FlowEntryInstall entry, boolean async) {
// Mark the entry to be deleted (for CC just in case we fail)
entry.toBeDeleted();
// Remove from node
- Status status = (async)?
- programmer.removeFlowAsync(entry.getNode(), entry
- .getInstall().getFlow()) :
- programmer.removeFlow(entry.getNode(), entry
- .getInstall().getFlow());
-
+ Status status = (async) ? programmer.removeFlowAsync(entry.getNode(), entry.getInstall().getFlow())
+ : programmer.removeFlow(entry.getNode(), entry.getInstall().getFlow());
if (!status.isSuccess()) {
- log.warn(
- "SDN Plugin failed to program the flow: {}. The failure is: {}",
- entry.getInstall(), status.getDescription());
+ log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
+ status.getDescription());
return status;
}
log.trace("Removed {}", entry.getInstall());
* the flow entry to install
* @param async
* the flag indicating if this is a asynchronous request
- * @return the status of this request. In case of asynchronous call, it
- * will contain the unique id assigned to this request
+ * @return the status of this request. In case of asynchronous call, it will
+ * contain the unique id assigned to this request
*/
private Status addEntriesInternal(FlowEntryInstall entry, boolean async) {
// Install the flow on the network node
- Status status = (async)?
- programmer.addFlowAsync(entry.getNode(), entry.getInstall()
- .getFlow()) :
- programmer.addFlow(entry.getNode(), entry.getInstall()
- .getFlow());
-
+ Status status = (async) ? programmer.addFlowAsync(entry.getNode(), entry.getInstall().getFlow()) : programmer
+ .addFlow(entry.getNode(), entry.getInstall().getFlow());
if (!status.isSuccess()) {
- log.warn(
- "SDN Plugin failed to program the flow: {}. The failure is: {}",
- entry.getInstall(), status.getDescription());
+ log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
+ status.getDescription());
return status;
}
return true;
}
- private synchronized void updateLocalDatabase(FlowEntryInstall entry,
- boolean add) {
+ private void updateLocalDatabase(FlowEntryInstall entry, boolean add) {
+ // Update the software view
+ updateSwViewes(entry, add);
+
// Update node indexed flow database
updateNodeFlowsDB(entry, add);
/*
* Update the node mapped flows database
*/
- private synchronized void updateNodeFlowsDB(FlowEntryInstall flowEntries, boolean add) {
+ private void updateSwViewes(FlowEntryInstall flowEntries, boolean add) {
+ if (add) {
+ originalSwView.put(flowEntries.getOriginal(), flowEntries.getOriginal());
+ installedSwView.put(flowEntries, flowEntries);
+ } else {
+ originalSwView.remove(flowEntries.getOriginal());
+ installedSwView.remove(flowEntries);
+ }
+ }
+
+ /*
+ * Update the node mapped flows database
+ */
+ private void updateNodeFlowsDB(FlowEntryInstall flowEntries, boolean add) {
Node node = flowEntries.getNode();
- Set<FlowEntryInstall> flowEntrylist = this.nodeFlows.get(node);
- if (flowEntrylist == null) {
- if (add == false) {
+ List<FlowEntryInstall> nodeIndeces = this.nodeFlows.get(node);
+ if (nodeIndeces == null) {
+ if (!add) {
return;
} else {
- flowEntrylist = new HashSet<FlowEntryInstall>();
+ nodeIndeces = new ArrayList<FlowEntryInstall>();
}
}
- if (add == true) {
- flowEntrylist.add(flowEntries);
+ if (add) {
+ nodeIndeces.add(flowEntries);
} else {
- flowEntrylist.remove(flowEntries);
+ nodeIndeces.remove(flowEntries);
}
- if (flowEntrylist.isEmpty()) {
+ // Update cache across cluster
+ if (nodeIndeces.isEmpty()) {
this.nodeFlows.remove(node);
} else {
- this.nodeFlows.put(node, flowEntrylist);
+ this.nodeFlows.put(node, nodeIndeces);
}
}
* Update the group name mapped flows database
*/
private void updateGroupFlowsDB(FlowEntryInstall flowEntries, boolean add) {
- Set<FlowEntryInstall> flowList;
- FlowEntryInstall exists = null;
- String flowName = flowEntries.getFlowName();
String groupName = flowEntries.getGroupName();
- if (this.groupFlows == null) {
- return;
- }
-
// Flow may not be part of a group
if (groupName == null) {
return;
}
- if (this.groupFlows.containsKey(groupName)) {
- flowList = this.groupFlows.get(groupName);
- } else {
- if (add == false) {
+ List<FlowEntryInstall> indices = this.groupFlows.get(groupName);
+ if (indices == null) {
+ if (!add) {
return;
} else {
- flowList = new HashSet<FlowEntryInstall>();
- }
- }
-
- for (FlowEntryInstall flow : flowList) {
- if (flow.equalsByNodeAndName(flowEntries.getNode(), flowName)) {
- exists = flow;
- break;
+ indices = new ArrayList<FlowEntryInstall>();
}
}
- if (exists == null && add == false) {
- return;
- }
-
- if (exists != null) {
- flowList.remove(exists);
- }
-
- if (add == true) {
- flowList.add(flowEntries);
+ if (add) {
+ indices.add(flowEntries);
+ } else {
+ indices.remove(flowEntries);
}
- if (flowList.isEmpty()) {
+ // Update cache across cluster
+ if (indices.isEmpty()) {
this.groupFlows.remove(groupName);
} else {
- this.groupFlows.put(groupName, flowList);
+ this.groupFlows.put(groupName, indices);
}
}
* entry is effectively present in the local database
*/
@SuppressWarnings("unused")
- private synchronized Status removeEntry(Node node, String flowName) {
+ private Status removeEntry(Node node, String flowName) {
FlowEntryInstall target = null;
// Find in database
- for (FlowEntryInstall entry : this.nodeFlows.get(node)) {
+ for (FlowEntryInstall entry : installedSwView.values()) {
if (entry.equalsByNodeAndName(node, flowName)) {
target = entry;
break;
}
// Remove from node
- Status status = programmer.removeFlow(target.getNode(), target
- .getInstall().getFlow());
+ Status status = programmer.removeFlow(target.getNode(), target.getInstall().getFlow());
// Update DB
if (status.isSuccess()) {
updateLocalDatabase(target, false);
} else {
// log the error
- log.warn(
- "SDN Plugin failed to remove the flow: {}. The failure is: {}",
- target.getInstall(), status.getDescription());
+ log.warn("SDN Plugin failed to remove the flow: {}. The failure is: {}", target.getInstall(),
+ status.getDescription());
}
return status;
}
@Override
- public Status modifyFlowEntry(FlowEntry currentFlowEntry,
- FlowEntry newFlowEntry) {
+ public Status modifyFlowEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) {
Status status = null;
if (inContainerMode) {
String msg = "Controller in container mode: Modify Refused";
@Override
public Status modifyOrAddFlowEntry(FlowEntry newFlowEntry) {
/*
- * Run a loose check on the installed entries to decide whether to go
- * with a add or modify method. A loose check means only check against
- * the original flow entry requests and not against the installed flow
+ * Run a check on the original entries to decide whether to go with a
+ * add or modify method. A loose check means only check against the
+ * original flow entry requests and not against the installed flow
* entries which are the result of the original entry merged with the
* container flow(s) (if any). The modifyFlowEntry method in presence of
* conflicts with the Container flows (if any) would revert back to a
* delete + add pattern
*/
- FlowEntryInstall currentFlowEntries = findMatch(newFlowEntry, true);
+ FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);
- if (currentFlowEntries != null) {
- return modifyFlowEntry(currentFlowEntries.getOriginal(),
- newFlowEntry);
+ if (currentFlowEntry != null) {
+ return modifyFlowEntry(currentFlowEntry, newFlowEntry);
} else {
return installFlowEntry(newFlowEntry);
}
}
@Override
- public Status modifyOrAddFlowEntryAsync(FlowEntry newone) {
+ public Status modifyOrAddFlowEntryAsync(FlowEntry newFlowEntry) {
/*
- * Run a loose check on the installed entries to decide whether to go
- * with a add or modify method. A loose check means only check against
- * the original flow entry requests and not against the installed flow
+ * Run a check on the original entries to decide whether to go with an
+ * add or modify method. A loose check means only check against the
+ * original flow entry requests and not against the installed flow
* entries which are the result of the original entry merged with the
* container flow(s) (if any). The modifyFlowEntry method in presence of
* conflicts with the Container flows (if any) would revert back to a
* delete + add pattern
*/
- FlowEntryInstall currentFlowEntries = findMatch(newone, true);
+ FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);
- if (currentFlowEntries != null) {
- return modifyFlowEntryAsync(currentFlowEntries.getOriginal(),
- newone);
+ if (currentFlowEntry != null) {
+ return modifyFlowEntryAsync(currentFlowEntry, newFlowEntry);
} else {
- return installFlowEntryAsync(newone);
+ return installFlowEntryAsync(newFlowEntry);
}
}
-
- /**
- * Try to find in the database if a Flow with the same Match and priority of
- * the passed one already exists for the specified network node. Flow,
- * priority and network node are all specified in the FlowEntry If found,
- * the respective FlowEntryInstall Object is returned
- *
- * @param flowEntry
- * the FlowEntry to be tested against the ones installed
- * @param looseCheck
- * if true, the function will run the check against the original
- * flow entry portion of the installed entries
- * @return null if not found, otherwise the FlowEntryInstall which contains
- * the existing flow entry
- */
- private synchronized FlowEntryInstall findMatch(FlowEntry flowEntry, boolean looseCheck) {
- Flow flow = flowEntry.getFlow();
- Match match = flow.getMatch();
- short priority = flow.getPriority();
- Set<FlowEntryInstall> thisNodeList = nodeFlows.get(flowEntry.getNode());
-
- if (thisNodeList != null) {
- for (FlowEntryInstall flowEntries : thisNodeList) {
- flow = (looseCheck == false) ? flowEntries.getInstall()
- .getFlow() : flowEntries.getOriginal().getFlow();
- if (flow.getMatch().equals(match)
- && flow.getPriority() == priority) {
- return flowEntries;
+ @Override
+ public Status uninstallFlowEntryGroup(String groupName) {
+ if (groupName == null || groupName.isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Invalid group name");
+ }
+ if (groupName.equals(FlowConfig.internalStaticFlowsGroup)) {
+ return new Status(StatusCode.BADREQUEST, "Static flows group cannot be deleted through this api");
+ }
+ if (inContainerMode) {
+ String msg = "Controller in container mode: Group Uninstall Refused";
+ String logMsg = msg + ": {}";
+ log.warn(logMsg, groupName);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
+ }
+ // Only dereference the group after confirming it exists: calling
+ // size() on the result of get() for an unknown group name would
+ // throw a NullPointerException. An absent group means nothing to
+ // remove, so it reports success with zero pending removals.
+ int toBeRemoved = 0;
+ String error = "";
+ if (groupFlows.containsKey(groupName)) {
+ List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
+ toBeRemoved = list.size();
+ for (FlowEntryInstall entry : list) {
+ Status status = this.removeEntry(entry.getOriginal(), false);
+ if (status.isSuccess()) {
+ toBeRemoved -= 1;
+ } else {
+ error = status.getDescription();
}
}
}
- return null;
+ return (toBeRemoved == 0) ? new Status(StatusCode.SUCCESS) : new Status(StatusCode.INTERNALERROR,
+ "Not all the flows were removed: " + error);
+ }
+
+ @Override
+ public Status uninstallFlowEntryGroupAsync(String groupName) {
+ // Asynchronous variant of uninstallFlowEntryGroup: same validity and
+ // container-mode checks, but each removeEntry result is ignored and
+ // the method unconditionally reports SUCCESS once the removals have
+ // been dispatched.
+ if (groupName == null || groupName.isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Invalid group name");
+ }
+ if (groupName.equals(FlowConfig.internalStaticFlowsGroup)) {
+ return new Status(StatusCode.BADREQUEST, "Static flows group cannot be deleted through this api");
+ }
+ if (inContainerMode) {
+ String msg = "Controller in container mode: Group Uninstall Refused";
+ String logMsg = msg + ": {}";
+ log.warn(logMsg, groupName);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
+ }
+ // Iterate over a snapshot copy of the group's list — presumably to
+ // avoid concurrent modification while entries are being removed;
+ // TODO confirm against how groupFlows is updated by removeEntry.
+ if (groupFlows.containsKey(groupName)) {
+ List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
+ for (FlowEntryInstall entry : list) {
+ this.removeEntry(entry.getOriginal(), true);
+ }
+ }
+ return new Status(StatusCode.SUCCESS);
}
+ @Override
public boolean checkFlowEntryConflict(FlowEntry flowEntry) {
return entryConflictsWithContainerFlows(flowEntry);
}
* merged flow may conflict with an existing old container flows merged flow
* on the network node
*/
- private synchronized void updateFlowsContainerFlow() {
- List<FlowEntryInstall> oldCouples = new ArrayList<FlowEntryInstall>();
- List<FlowEntry> toReinstall = new ArrayList<FlowEntry>();
- for (Entry<Node, Set<FlowEntryInstall>> entry : this.nodeFlows
- .entrySet()) {
- oldCouples.clear();
- toReinstall.clear();
- if (entry.getValue() == null) {
- continue;
- }
- // Create a set of old entries and one of original entries to be
- // reinstalled
- for (FlowEntryInstall oldCouple : entry.getValue()) {
- oldCouples.add(oldCouple);
- toReinstall.add(oldCouple.getOriginal());
- }
+ private void updateFlowsContainerFlow() {
+ for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> entry : installedSwView.entrySet()) {
+ FlowEntryInstall current = entry.getValue();
+ FlowEntry reInstall = current.getOriginal();
// Remove the old couples. No validity checks to be run, use the
// internal remove
- for (FlowEntryInstall oldCouple : oldCouples) {
- this.removeEntryInternal(oldCouple, false);
- }
+ this.removeEntryInternal(current, false);
+
// Reinstall the original flow entries, via the regular path: new
// cFlow merge + validations
- for (FlowEntry flowEntry : toReinstall) {
- this.installFlowEntry(flowEntry);
- }
+ this.installFlowEntry(reInstall);
}
}
public void nonClusterObjectCreate() {
- nodeFlows = new ConcurrentHashMap<Node, Set<FlowEntryInstall>>();
+ originalSwView = new ConcurrentHashMap<FlowEntry, FlowEntry>();
+ installedSwView = new ConcurrentHashMap<FlowEntryInstall, FlowEntryInstall>();
+ nodeFlows = new ConcurrentHashMap<Node, List<FlowEntryInstall>>();
+ groupFlows = new ConcurrentHashMap<String, List<FlowEntryInstall>>();
TSPolicies = new ConcurrentHashMap<String, Object>();
- groupFlows = new ConcurrentHashMap<String, Set<FlowEntryInstall>>();
staticFlowsOrdinal = new ConcurrentHashMap<Integer, Integer>();
portGroupConfigs = new ConcurrentHashMap<String, PortGroupConfig>();
portGroupData = new ConcurrentHashMap<PortGroupConfig, Map<Node, PortGroup>>();
}
private void registerWithOSGIConsole() {
- BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass())
- .getBundleContext();
- bundleContext.registerService(CommandProvider.class.getName(), this,
- null);
+ BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass()).getBundleContext();
+ bundleContext.registerService(CommandProvider.class.getName(), this, null);
}
@Override
@Override
public List<FlowEntry> getFlowEntriesForGroup(String policyName) {
List<FlowEntry> list = new ArrayList<FlowEntry>();
- if (this.groupFlows != null && this.groupFlows.containsKey(policyName)) {
- for (FlowEntryInstall entries : groupFlows.get(policyName)) {
- list.add(entries.getOriginal());
+ if (policyName != null && !policyName.trim().isEmpty()) {
+ for (Map.Entry<FlowEntry, FlowEntry> entry : this.originalSwView.entrySet()) {
+ if (policyName.equals(entry.getKey().getGroupName())) {
+ list.add(entry.getKey().clone());
+ }
}
}
return list;
}
@Override
- public synchronized void addOutputPort(Node node, String flowName,
- List<NodeConnector> portList) {
-
- Set<FlowEntryInstall> flowEntryList = this.nodeFlows.get(node);
+ public void addOutputPort(Node node, String flowName, List<NodeConnector> portList) {
- for (FlowEntryInstall flow : flowEntryList) {
+ for (FlowEntryInstall flow : this.nodeFlows.get(node)) {
if (flow.getFlowName().equals(flowName)) {
FlowEntry currentFlowEntry = flow.getOriginal();
FlowEntry newFlowEntry = currentFlowEntry.clone();
}
Status error = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (error.isSuccess()) {
- log.info("Ports {} added to FlowEntry {}", portList,
- flowName);
+ log.info("Ports {} added to FlowEntry {}", portList, flowName);
} else {
- log.warn(
- "Failed to add ports {} to Flow entry {}. The failure is: {}",
- portList, currentFlowEntry.toString(),
- error.getDescription());
+ log.warn("Failed to add ports {} to Flow entry {}. The failure is: {}", portList,
+ currentFlowEntry.toString(), error.getDescription());
}
return;
}
}
- log.warn("Failed to add ports to Flow {} on Node {}: Entry Not Found",
- flowName, node);
+ log.warn("Failed to add ports to Flow {} on Node {}: Entry Not Found", flowName, node);
}
@Override
- public synchronized void removeOutputPort(Node node, String flowName,
- List<NodeConnector> portList) {
-
- Set<FlowEntryInstall> flowEntryList = this.nodeFlows.get(node);
-
- for (FlowEntryInstall flow : flowEntryList) {
+ public void removeOutputPort(Node node, String flowName, List<NodeConnector> portList) {
+ for (FlowEntryInstall index : this.nodeFlows.get(node)) {
+ FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
FlowEntry currentFlowEntry = flow.getOriginal();
FlowEntry newFlowEntry = currentFlowEntry.clone();
}
Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (status.isSuccess()) {
- log.info("Ports {} removed from FlowEntry {}", portList,
- flowName);
+ log.info("Ports {} removed from FlowEntry {}", portList, flowName);
} else {
- log.warn(
- "Failed to remove ports {} from Flow entry {}. The failure is: {}",
- portList, currentFlowEntry.toString(),
- status.getDescription());
+ log.warn("Failed to remove ports {} from Flow entry {}. The failure is: {}", portList,
+ currentFlowEntry.toString(), status.getDescription());
}
return;
}
}
- log.warn(
- "Failed to remove ports from Flow {} on Node {}: Entry Not Found",
- flowName, node);
+ log.warn("Failed to remove ports from Flow {} on Node {}: Entry Not Found", flowName, node);
}
/*
* This function assumes the target flow has only one output port
*/
@Override
- public synchronized void replaceOutputPort(Node node, String flowName,
- NodeConnector outPort) {
+ public void replaceOutputPort(Node node, String flowName, NodeConnector outPort) {
FlowEntry currentFlowEntry = null;
FlowEntry newFlowEntry = null;
- Set<FlowEntryInstall> flowEntryList = this.nodeFlows.get(node);
// Find the flow
- for (FlowEntryInstall flow : flowEntryList) {
+ for (FlowEntryInstall index : this.nodeFlows.get(node)) {
+ FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
currentFlowEntry = flow.getOriginal();
break;
}
}
if (currentFlowEntry == null) {
- log.warn(
- "Failed to replace output port for flow {} on node {}: Entry Not Found",
- flowName, node);
+ log.warn("Failed to replace output port for flow {} on node {}: Entry Not Found", flowName, node);
return;
}
Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (status.isSuccess()) {
- log.info("Output port replaced with {} for flow {} on node {}",
- outPort, flowName, node);
+ log.info("Output port replaced with {} for flow {} on node {}", outPort, flowName, node);
} else {
- log.warn(
- "Failed to replace output port for flow {} on node {}. The failure is: {}",
- flowName, node, status.getDescription());
+ log.warn("Failed to replace output port for flow {} on node {}. The failure is: {}", flowName, node,
+ status.getDescription());
}
return;
}
@Override
- public synchronized NodeConnector getOutputPort(Node node, String flowName) {
- Set<FlowEntryInstall> flowEntryList = this.nodeFlows.get(node);
-
- for (FlowEntryInstall flow : flowEntryList) {
+ public NodeConnector getOutputPort(Node node, String flowName) {
+ for (FlowEntryInstall index : this.nodeFlows.get(node)) {
+ FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
for (Action action : flow.getOriginal().getFlow().getActions()) {
if (action.getType() == ActionType.OUTPUT) {
}
}
}
-
return null;
}
log.debug("FRM allocateCaches for Container {}", container);
try {
+ clusterContainerService.createCache("frm.originalSwView",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+
+ clusterContainerService.createCache("frm.installedSwView",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+
clusterContainerService.createCache("frm.nodeFlows",
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
} catch (CacheConfigException cce) {
- log.error("FRM CacheConfigException", cce);
+ // Keep the Throwable as the last log argument: SLF4J then records
+ // the stack trace, which the bare-message form would silently drop
+ log.error("FRM CacheConfigException", cce);
} catch (CacheExistException cce) {
log.error("FRM CacheExistException", cce);
}
}
log.debug("FRM retrieveCaches for Container {}", container);
+ map = clusterContainerService.getCache("frm.originalSwView");
+ if (map != null) {
+ originalSwView = (ConcurrentMap<FlowEntry, FlowEntry>) map;
+ } else {
+ log.error("FRM Cache frm.originalSwView allocation failed for Container {}", container.getName());
+ }
+
+ map = clusterContainerService.getCache("frm.installedSwView");
+ if (map != null) {
+ installedSwView = (ConcurrentMap<FlowEntryInstall, FlowEntryInstall>) map;
+ } else {
+ log.error("FRM Cache frm.installedSwView allocation failed for Container {}", container.getName());
+ }
+
map = clusterContainerService.getCache("frm.nodeFlows");
if (map != null) {
- nodeFlows = (ConcurrentMap<Node, Set<FlowEntryInstall>>) map;
+ nodeFlows = (ConcurrentMap<Node, List<FlowEntryInstall>>) map;
} else {
- log.error(
- "FRM Cache frm.nodeFlows allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.nodeFlows allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.groupFlows");
if (map != null) {
- groupFlows = (ConcurrentMap<String, Set<FlowEntryInstall>>) map;
+ groupFlows = (ConcurrentMap<String, List<FlowEntryInstall>>) map;
} else {
- log.error(
- "FRM Cache frm.groupFlows allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.groupFlows allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.staticFlows");
if (map != null) {
staticFlows = (ConcurrentMap<Integer, FlowConfig>) map;
} else {
- log.error(
- "FRM Cache frm.staticFlows allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.staticFlows allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.flowsSaveEvent");
if (map != null) {
flowsSaveEvent = (ConcurrentMap<Long, String>) map;
} else {
- log.error(
- "FRM Cache frm.flowsSaveEvent allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.flowsSaveEvent allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.staticFlowsOrdinal");
if (map != null) {
staticFlowsOrdinal = (ConcurrentMap<Integer, Integer>) map;
} else {
- log.error(
- "FRM Cache frm.staticFlowsOrdinal allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.staticFlowsOrdinal allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.portGroupConfigs");
if (map != null) {
portGroupConfigs = (ConcurrentMap<String, PortGroupConfig>) map;
} else {
- log.error(
- "FRM Cache frm.portGroupConfigs allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.portGroupConfigs allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.portGroupData");
if (map != null) {
portGroupData = (ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>>) map;
} else {
- log.error(
- "FRM Cache frm.portGroupData allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.portGroupData allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.TSPolicies");
if (map != null) {
TSPolicies = (ConcurrentMap<String, Object>) map;
} else {
- log.error(
- "FRM Cache frm.TSPolicies allocation failed for Container {}",
- container);
+ log.error("FRM Cache frm.TSPolicies allocation failed for Container {}", container.getName());
}
}
- @SuppressWarnings("deprecation")
- private void destroyCaches() {
- if (this.clusterContainerService == null) {
- log.warn("Un-initialized clusterContainerService, can't destroy cache");
- return;
- }
-
- log.debug("FRM destroyCaches for Container {}", container);
- clusterContainerService.destroyCache("frm.nodeFlows");
- clusterContainerService.destroyCache("frm.TSPolicies");
- clusterContainerService.destroyCache("frm.groupFlows");
- clusterContainerService.destroyCache("frm.staticFlows");
- clusterContainerService.destroyCache("frm.flowsSaveEvent");
- clusterContainerService.destroyCache("frm.staticFlowsOrdinal");
- clusterContainerService.destroyCache("frm.portGroupData");
- clusterContainerService.destroyCache("frm.portGroupConfigs");
- nonClusterObjectCreate();
- }
-
private boolean flowConfigExists(FlowConfig config) {
- // As per customer requirement, flow name has to be unique on per node
- // id basis
- for (FlowConfig fc : staticFlows.values()) {
- if (fc.isByNameAndNodeIdEqual(config)) {
+ // Flow name has to be unique on per node id basis
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ if (entry.getValue().isByNameAndNodeIdEqual(config)) {
return true;
}
}
@Override
public Status addStaticFlow(FlowConfig config, boolean restore) {
- StringBuffer resultStr = new StringBuffer();
boolean multipleFlowPush = false;
String error;
Status status;
- config.setStatus(StatusCode.SUCCESS.toString());
+ config.setStatus(SUCCESS);
+
+ // Skip validation check if we are trying to restore a saved config
+ if (!restore && !(status = config.validate(container)).isSuccess()) {
+ log.warn("Invalid Configuration for flow {}. The failure is {}", config, status.getDescription());
+ error = "Invalid Configuration (" + status.getDescription() + ")";
+ config.setStatus(error);
+ return new Status(StatusCode.BADREQUEST, error);
+ }
// Presence check
if (flowConfigExists(config)) {
error = "Entry with this name on specified switch already exists";
- log.warn(
- "Entry with this name on specified switch already exists: {}",
- config);
+ log.warn("Entry with this name on specified switch already exists: {}", config);
config.setStatus(error);
return new Status(StatusCode.CONFLICT, error);
}
- // Skip validation check if we are trying to restore a saved config
- if (!restore && !config.isValid(container, resultStr)) {
- log.warn("Invalid Configuration for flow {}. The failure is {}",
- config, resultStr.toString());
- error = "Invalid Configuration (" + resultStr.toString() + ")";
- config.setStatus(error);
- return new Status(StatusCode.BADREQUEST, error);
- }
-
if ((config.getIngressPort() == null) && config.getPortGroup() != null) {
for (String portGroupName : portGroupConfigs.keySet()) {
if (portGroupName.equalsIgnoreCase(config.getPortGroup())) {
}
}
if (!multipleFlowPush) {
- log.warn(
- "Invalid Configuration(Invalid PortGroup Name) for flow {}",
- config);
+ log.warn("Invalid Configuration(Invalid PortGroup Name) for flow {}", config);
error = "Invalid Configuration (Invalid PortGroup Name)";
config.setStatus(error);
return new Status(StatusCode.BADREQUEST, error);
// Program hw
if (config.installInHw()) {
FlowEntry entry = config.getFlowEntry();
- status = this.addEntry(entry, false);
+ status = this.installFlowEntry(entry);
if (!status.isSuccess()) {
config.setStatus(status.getDescription());
if (!restore) {
staticFlows.put(ordinal, config);
if (multipleFlowPush) {
- PortGroupConfig pgconfig = portGroupConfigs.get(config
- .getPortGroup());
+ PortGroupConfig pgconfig = portGroupConfigs.get(config.getPortGroup());
Map<Node, PortGroup> existingData = portGroupData.get(pgconfig);
if (existingData != null) {
portGroupChanged(pgconfig, existingData, true);
}
private void addStaticFlowsToSwitch(Node node) {
- for (FlowConfig config : staticFlows.values()) {
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
if (config.getNode().equals(node)) {
- if (config.installInHw()
- && !config.getStatus().equals(
- StatusCode.SUCCESS.toString())) {
- Status status = this.addEntry(config.getFlowEntry(), false);
+ if (config.installInHw() && !config.getStatus().equals(SUCCESS)) {
+ Status status = this.installFlowEntryAsync(config.getFlowEntry());
config.setStatus(status.getDescription());
}
}
}
+ // Update cluster cache
+ refreshClusterStaticFlowsStatus(node);
}
private void updateStaticFlowConfigsOnNodeDown(Node node) {
for (Integer index : toRemove) {
staticFlows.remove(index);
}
+ // Update cluster cache
+ refreshClusterStaticFlowsStatus(node);
+
}
private void updateStaticFlowConfigsOnContainerModeChange(UpdateType update) {
- log.trace("Updating Static Flow configs on container mode change: {}",
- update);
+ log.trace("Updating Static Flow configs on container mode change: {}", update);
- for (FlowConfig config : staticFlows.values()) {
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
- if (config.installInHw()) {
+ if (config.installInHw() && !config.isInternalFlow()) {
switch (update) {
case ADDED:
config.setStatus("Removed from node because in container mode");
break;
case REMOVED:
- config.setStatus(StatusCode.SUCCESS.toString());
+ config.setStatus(SUCCESS);
break;
default:
}
}
}
+ // Update cluster cache
+ refreshClusterStaticFlowsStatus(null);
}
+ @Override
public Status removeStaticFlow(FlowConfig config) {
/*
* No config.isInternal() check as NB does not take this path and GUI
* reactive, so that we can remove the internal generated LLDP and ARP
* punt flows
*/
- for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+
+ // Look for the target configuration entry
+ Integer key = 0;
+ FlowConfig target = null;
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().isByNameAndNodeIdEqual(config)) {
- // Program the network node
- Status status = this.removeEntry(config.getFlowEntry(), false);
- // Update configuration database if programming was successful
- if (status.isSuccess()) {
- staticFlows.remove(entry.getKey());
- return status;
- } else {
- entry.getValue().setStatus(status.getDescription());
- return status;
- }
+ key = entry.getKey();
+ target = entry.getValue();
+ break;
}
}
- return new Status(StatusCode.NOTFOUND, "Entry Not Present");
+ if (target == null) {
+ return new Status(StatusCode.NOTFOUND, "Entry Not Present");
+ }
+
+ // Program the network node
+ Status status = this.removeEntry(config.getFlowEntry(), false);
+
+ // Update configuration database if programming was successful;
+ // otherwise record the failure description on the configuration
+ // entry, preserving the behavior of the code this replaces
+ if (status.isSuccess()) {
+ staticFlows.remove(key);
+ } else {
+ target.setStatus(status.getDescription());
+ }
+
+ return status;
}
@Override
public Status removeStaticFlow(String name, Node node) {
- for (Map.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
- FlowConfig entry = mapEntry.getValue();
- Status status = new Status(null, null);
- if (entry.isByNameAndNodeIdEqual(name, node)) {
- // Validity check for api3 entry point
- if (entry.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated "
- + "flow cannot be deleted";
- String logMsg = msg + ": {}";
- log.warn(logMsg, name);
- return new Status(StatusCode.NOTACCEPTABLE, msg);
- }
- if (!entry.isPortGroupEnabled()) {
- // Program the network node
- status = this.removeEntry(entry.getFlowEntry(), false);
- }
- // Update configuration database if programming was successful
- if (status.isSuccess()) {
- staticFlows.remove(mapEntry.getKey());
- return status;
- } else {
- entry.setStatus(status.getDescription());
- return status;
- }
+ // Look for the target configuration entry
+ Integer key = 0;
+ FlowConfig target = null;
+ for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
+ if (mapEntry.getValue().isByNameAndNodeIdEqual(name, node)) {
+ key = mapEntry.getKey();
+ target = mapEntry.getValue();
+ break;
}
}
- return new Status(StatusCode.NOTFOUND, "Entry Not Present");
+ if (target == null) {
+ return new Status(StatusCode.NOTFOUND, "Entry Not Present");
+ }
+
+ // Validity check for api3 entry point
+ if (target.isInternalFlow()) {
+ String msg = "Invalid operation: Controller generated flow cannot be deleted";
+ String logMsg = msg + ": {}";
+ log.warn(logMsg, name);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
+ }
+
+ if (target.isPortGroupEnabled()) {
+ String msg = "Invalid operation: Port Group flows cannot be deleted through this API";
+ String logMsg = msg + ": {}";
+ log.warn(logMsg, name);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
+ }
+
+ // Program the network node
+ Status status = this.removeEntry(target.getFlowEntry(), false);
+
+ // Update configuration database if programming was successful;
+ // otherwise record the failure description on the configuration
+ // entry, preserving the behavior of the code this replaces
+ if (status.isSuccess()) {
+ staticFlows.remove(key);
+ } else {
+ target.setStatus(status.getDescription());
+ }
+
+ return status;
}
+ @Override
public Status modifyStaticFlow(FlowConfig newFlowConfig) {
// Validity check for api3 entry point
if (newFlowConfig.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated flow "
- + "cannot be modified";
+ String msg = "Invalid operation: Controller generated flow cannot be modified";
String logMsg = msg + ": {}";
log.warn(logMsg, newFlowConfig);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Validity Check
- StringBuffer resultStr = new StringBuffer();
- if (!newFlowConfig.isValid(container, resultStr)) {
- String msg = "Invalid Configuration (" + resultStr.toString() + ")";
+ Status status = newFlowConfig.validate(container);
+ if (!status.isSuccess()) {
+ String msg = "Invalid Configuration (" + status.getDescription() + ")";
newFlowConfig.setStatus(msg);
- log.warn("Invalid Configuration for flow {}. The failure is {}",
- newFlowConfig, resultStr.toString());
+ log.warn("Invalid Configuration for flow {}. The failure is {}", newFlowConfig, status.getDescription());
return new Status(StatusCode.BADREQUEST, msg);
}
FlowConfig oldFlowConfig = null;
Integer index = null;
- for (Map.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
+ for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
FlowConfig entry = mapEntry.getValue();
- if (entry.isByNameAndNodeIdEqual(newFlowConfig.getName(),
- newFlowConfig.getNode())) {
+ if (entry.isByNameAndNodeIdEqual(newFlowConfig.getName(), newFlowConfig.getNode())) {
oldFlowConfig = entry;
index = mapEntry.getKey();
break;
// Do not attempt to reinstall the flow, warn user
if (newFlowConfig.equals(oldFlowConfig)) {
String msg = "No modification detected";
- log.info(
- "Static flow modification skipped. New flow and old flow are the same: {}",
- newFlowConfig);
+ log.info("Static flow modification skipped. New flow and old flow are the same: {}", newFlowConfig);
return new Status(StatusCode.SUCCESS, msg);
}
// If flow is installed, program the network node
- Status status = new Status(StatusCode.SUCCESS, "Saved in config");
+ status = new Status(StatusCode.SUCCESS, "Saved in config");
if (oldFlowConfig.installInHw()) {
- status = this.modifyEntry(oldFlowConfig.getFlowEntry(),
- newFlowConfig.getFlowEntry(), false);
+ status = this.modifyEntry(oldFlowConfig.getFlowEntry(), newFlowConfig.getFlowEntry(), false);
}
// Update configuration database if programming was successful
}
// Validity check for api3 entry point
if (config.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated flow "
- + "cannot be modified";
+ String msg = "Invalid operation: Controller generated flow cannot be modified";
String logMsg = msg + ": {}";
log.warn(logMsg, config);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
+ // Find the config entry
+ Integer key = 0;
+ FlowConfig target = null;
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig conf = entry.getValue();
if (conf.isByNameAndNodeIdEqual(config)) {
- // Program the network node
- Status status = new Status(StatusCode.SUCCESS);
- if (conf.installInHw()) {
- status = this.removeEntry(conf.getFlowEntry(), false);
- } else {
- status = this.addEntry(conf.getFlowEntry(), false);
- }
- if (!status.isSuccess()) {
- conf.setStatus(status.getDescription());
- return status;
- }
+ key = entry.getKey();
+ target = conf;
+ break;
+ }
+ }
+ if (target != null) {
+ // Program the network node
+ Status status;
+ if (target.installInHw()) {
+ status = this.removeEntry(target.getFlowEntry(), false);
+ } else {
+ status = this.addEntry(target.getFlowEntry(), false);
+ }
+ // Update Configuration database
+ target.setStatus(SUCCESS);
+ target.toggleInstallation();
+ staticFlows.put(key, target);
+ return status;
+ }
+
+ return new Status(StatusCode.NOTFOUND, "Unable to locate the entry. Failed to toggle status");
+ }
- // Update Configuration database
- conf.setStatus(StatusCode.SUCCESS.toString());
- conf.toggleStatus();
- return status;
+ /**
+ * Reinsert all static flows entries in the cache to force cache updates in
+ * the cluster. This is useful when only some parameters were changed in the
+ * entries, like the status.
+ *
+ * @param node
+ * The node for which the static flow configurations have to be
+ * refreshed. If null, all nodes static flows will be refreshed.
+ */
+ private void refreshClusterStaticFlowsStatus(Node node) {
+ // Refresh cluster cache
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ if (node == null || entry.getValue().getNode().equals(node)) {
+ staticFlows.put(entry.getKey(), entry.getValue());
}
}
- return new Status(StatusCode.NOTFOUND,
- "Unable to locate the entry. Failed to toggle status");
}
/**
- * Uninstall all the Flow Entries present in the software view A copy of
- * each entry is stored in the inactive list so that it can be re-applied
- * when needed This function is called on the default container instance of
- * FRM only when the first container is created
+ * Uninstall all the non-internal Flow Entries present in the software view.
+ * A copy of each entry is stored in the inactive list so that it can be
+ * re-applied when needed. This function is called on the global instance of
+ * FRM only, when the first container is created
*/
private void uninstallAllFlowEntries() {
- log.info("Uninstalling all flows");
+ log.info("Uninstalling all non-internal flows");
// Store entries / create target list
- for (ConcurrentMap.Entry<Node, Set<FlowEntryInstall>> mapEntry : nodeFlows
- .entrySet()) {
- for (FlowEntryInstall flowEntries : mapEntry.getValue()) {
+ for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> mapEntry : installedSwView.entrySet()) {
+ FlowEntryInstall flowEntries = mapEntry.getValue();
+ // Skip internal generated static flows
+ if (!flowEntries.isInternal()) {
inactiveFlows.add(flowEntries.getOriginal());
}
}
for (FlowEntry flowEntry : inactiveFlows) {
Status status = this.removeEntry(flowEntry, false);
if (!status.isSuccess()) {
- log.warn("Failed to remove entry: {}. The failure is: {}",
- flowEntry, status.getDescription());
+ log.warn("Failed to remove entry: {}. The failure is: {}", flowEntry, status.getDescription());
}
}
}
log.info("Reinstalling all inactive flows");
for (FlowEntry flowEntry : this.inactiveFlows) {
- Status status = this.addEntry(flowEntry, false);
- if (!status.isSuccess()) {
- log.warn("Failed to install entry: {}. The failure is: {}",
- flowEntry, status.getDescription());
- }
+ this.addEntry(flowEntry, false);
}
// Empty inactive list in any case
inactiveFlows.clear();
}
+ @Override
public List<FlowConfig> getStaticFlows() {
- return getStaticFlowsOrderedList(staticFlows, staticFlowsOrdinal.get(0)
- .intValue());
+ return getStaticFlowsOrderedList(staticFlows, staticFlowsOrdinal.get(0).intValue());
}
- // TODO: need to come out with a better algorithm for mantaining the order
+ // TODO: need to come out with a better algorithm for maintaining the order
// of the configuration entries
// with actual one, index associated to deleted entries cannot be reused and
// map grows...
- private List<FlowConfig> getStaticFlowsOrderedList(
- ConcurrentMap<Integer, FlowConfig> flowMap, int maxKey) {
+ private List<FlowConfig> getStaticFlowsOrderedList(ConcurrentMap<Integer, FlowConfig> flowMap, int maxKey) {
List<FlowConfig> orderedList = new ArrayList<FlowConfig>();
for (int i = 0; i <= maxKey; i++) {
FlowConfig entry = flowMap.get(i);
@Override
public FlowConfig getStaticFlow(String name, Node node) {
- for (FlowConfig config : staticFlows.values()) {
- if (config.isByNameAndNodeIdEqual(name, node)) {
- return config;
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ if (entry.getValue().isByNameAndNodeIdEqual(name, node)) {
+ return entry.getValue();
}
}
return null;
@Override
public List<FlowConfig> getStaticFlows(Node node) {
List<FlowConfig> list = new ArrayList<FlowConfig>();
- for (FlowConfig config : staticFlows.values()) {
- if (config.onNode(node)) {
- list.add(config);
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ if (entry.getValue().onNode(node)) {
+ list.add(entry.getValue());
}
}
return list;
@Override
public List<String> getStaticFlowNamesForNode(Node node) {
List<String> list = new ArrayList<String>();
- for (FlowConfig config : staticFlows.values()) {
- if (config.onNode(node)) {
- list.add(config.getName());
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ if (entry.getValue().onNode(node)) {
+ list.add(entry.getValue().getName());
}
}
return list;
@Override
public List<Node> getListNodeWithConfiguredFlows() {
Set<Node> set = new HashSet<Node>();
- for (FlowConfig config : staticFlows.values()) {
- set.add(config.getNode());
+ for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ set.add(entry.getValue().getNode());
}
return new ArrayList<Node>(set);
}
@SuppressWarnings("unchecked")
private void loadFlowConfiguration() {
ObjectReader objReader = new ObjectReader();
- ConcurrentMap<Integer, FlowConfig> confList = (ConcurrentMap<Integer, FlowConfig>) objReader
- .read(this, frmFileName);
+ ConcurrentMap<Integer, FlowConfig> confList = (ConcurrentMap<Integer, FlowConfig>) objReader.read(this,
+ frmFileName);
- ConcurrentMap<String, PortGroupConfig> pgConfig = (ConcurrentMap<String, PortGroupConfig>) objReader
- .read(this, portGroupFileName);
+ ConcurrentMap<String, PortGroupConfig> pgConfig = (ConcurrentMap<String, PortGroupConfig>) objReader.read(this,
+ portGroupFileName);
if (pgConfig != null) {
- for (Map.Entry<String, PortGroupConfig> entry : pgConfig.entrySet()) {
- addPortGroupConfig(entry.getKey(), entry.getValue()
- .getMatchString(), true);
+ for (ConcurrentMap.Entry<String, PortGroupConfig> entry : pgConfig.entrySet()) {
+ addPortGroupConfig(entry.getKey(), entry.getValue().getMatchString(), true);
}
}
int maxKey = 0;
for (Integer key : confList.keySet()) {
- if (key.intValue() > maxKey)
+ if (key.intValue() > maxKey) {
maxKey = key.intValue();
+ }
}
for (FlowConfig conf : getStaticFlowsOrderedList(confList, maxKey)) {
}
@Override
- public Object readObject(ObjectInputStream ois)
- throws FileNotFoundException, IOException, ClassNotFoundException {
+ public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException {
return ois.readObject();
}
+ @Override
public Status saveConfig() {
// Publish the save config event to the cluster nodes
flowsSaveEvent.put(new Date().getTime(), SAVE);
private Status saveConfigInternal() {
ObjectWriter objWriter = new ObjectWriter();
- ConcurrentHashMap<Integer, FlowConfig> nonDynamicFlows = new ConcurrentHashMap<Integer, FlowConfig>();
+ ConcurrentMap<Integer, FlowConfig> nonDynamicFlows = new ConcurrentHashMap<Integer, FlowConfig>();
for (Integer ordinal : staticFlows.keySet()) {
FlowConfig config = staticFlows.get(ordinal);
// Do not save dynamic and controller generated static flows
nonDynamicFlows.put(ordinal, config);
}
objWriter.write(nonDynamicFlows, frmFileName);
- objWriter.write(new ConcurrentHashMap<String, PortGroupConfig>(
- portGroupConfigs), portGroupFileName);
+ objWriter.write(new ConcurrentHashMap<String, PortGroupConfig>(portGroupConfigs), portGroupFileName);
return new Status(StatusCode.SUCCESS, null);
}
}
@Override
- public void entryUpdated(Long key, String new_value, String cacheName,
- boolean originLocal) {
+ public void entryUpdated(Long key, String new_value, String cacheName, boolean originLocal) {
saveConfigInternal();
}
allowARP.setName("**Punt ARP Reply");
allowARP.setPriority("500");
allowARP.setNode(node);
- allowARP.setEtherType("0x"
- + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
- allowARP.setDstMac(HexEncode.bytesToHexString(switchManager
- .getControllerMAC()));
+ allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
+ allowARP.setDstMac(HexEncode.bytesToHexString(switchManager.getControllerMAC()));
allowARP.setActions(puntAction);
addStaticFlow(allowARP, false);
}
allowARP.setName("**Punt ARP");
allowARP.setPriority("1");
allowARP.setNode(node);
- allowARP.setEtherType("0x"
- + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
+ allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
allowARP.setActions(puntAction);
defaultConfigs.add(allowARP);
allowLLDP.setName("**Punt LLDP");
allowLLDP.setPriority("1");
allowLLDP.setNode(node);
- allowLLDP
- .setEtherType("0x"
- + Integer.toHexString(EtherTypes.LLDP.intValue())
- .toUpperCase());
+ allowLLDP.setEtherType("0x" + Integer.toHexString(EtherTypes.LLDP.intValue()).toUpperCase());
allowLLDP.setActions(puntAction);
defaultConfigs.add(allowLLDP);
}
}
- log.info("Set Switch {} Mode to {}", node, proactive);
+ log.info("Set Switch {} Mode to {}", node, (proactive ? "proactive" : "reactive"));
}
/**
*
* @param node
*/
- private synchronized void cleanDatabaseForNode(Node node) {
- log.info("Cleaning Flow database for Node {}", node.toString());
-
- // Find out which groups the node's flows are part of
- Set<String> affectedGroups = new HashSet<String>();
- Set<FlowEntryInstall> flowEntryList = nodeFlows.get(node);
- if (flowEntryList != null) {
- for (FlowEntryInstall entry : flowEntryList) {
- String groupName = entry.getGroupName();
- if (groupName != null) {
- affectedGroups.add(groupName);
- }
- }
- }
+ private void cleanDatabaseForNode(Node node) {
+ log.info("Cleaning Flow database for Node {}", node);
+ if (nodeFlows.containsKey(node)) {
+ List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>(nodeFlows.get(node));
- // Remove the node's flows from the group indexed flow database
- if (!affectedGroups.isEmpty()) {
- for (String group : affectedGroups) {
- Set<FlowEntryInstall> flowList = groupFlows.get(group);
- Set<FlowEntryInstall> toRemove = new HashSet<FlowEntryInstall>();
- for (FlowEntryInstall entry : flowList) {
- if (node.equals(entry.getNode())) {
- toRemove.add(entry);
- }
- }
- flowList.removeAll(toRemove);
- if (flowList.isEmpty()) {
- groupFlows.remove(group);
- }
+ for (FlowEntryInstall entry : toRemove) {
+ updateLocalDatabase(entry, false);
}
}
-
- // Remove the node's flows from the node indexed flow database
- nodeFlows.remove(node);
}
@Override
- public void notifyNode(Node node, UpdateType type,
- Map<String, Property> propMap) {
- switch (type) {
- case ADDED:
- addStaticFlowsToSwitch(node);
- break;
- case REMOVED:
- cleanDatabaseForNode(node);
- updateStaticFlowConfigsOnNodeDown(node);
- break;
- default:
- break;
- }
+ public void notifyNode(Node node, UpdateType type, Map<String, Property> propMap) {
+ this.pendingEvents.offer(new NodeUpdateEvent(type, node));
}
@Override
- public void notifyNodeConnector(NodeConnector nodeConnector,
- UpdateType type, Map<String, Property> propMap) {
+ public void notifyNodeConnector(NodeConnector nodeConnector, UpdateType type, Map<String, Property> propMap) {
+
}
- private FlowConfig getDerivedFlowConfig(FlowConfig original,
- String configName, Short port) {
+ private FlowConfig getDerivedFlowConfig(FlowConfig original, String configName, Short port) {
FlowConfig derivedFlow = new FlowConfig(original);
derivedFlow.setDynamic(true);
derivedFlow.setPortGroup(null);
return derivedFlow;
}
- private void addPortGroupFlows(PortGroupConfig config, Node node,
- PortGroup data) {
- for (Iterator<FlowConfig> it = staticFlows.values().iterator(); it
- .hasNext();) {
- FlowConfig staticFlow = it.next();
+ private void addPortGroupFlows(PortGroupConfig config, Node node, PortGroup data) {
+ for (FlowConfig staticFlow : staticFlows.values()) {
if (staticFlow.getPortGroup() == null) {
continue;
}
- if ((staticFlow.getNode().equals(node))
- && (staticFlow.getPortGroup().equals(config.getName()))) {
+ if ((staticFlow.getNode().equals(node)) && (staticFlow.getPortGroup().equals(config.getName()))) {
for (Short port : data.getPorts()) {
- FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow,
- config.getName(), port);
+ FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port);
addStaticFlow(derivedFlow, false);
}
}
}
}
- private void removePortGroupFlows(PortGroupConfig config, Node node,
- PortGroup data) {
- for (Iterator<FlowConfig> it = staticFlows.values().iterator(); it
- .hasNext();) {
- FlowConfig staticFlow = it.next();
+ private void removePortGroupFlows(PortGroupConfig config, Node node, PortGroup data) {
+ for (FlowConfig staticFlow : staticFlows.values()) {
if (staticFlow.getPortGroup() == null) {
continue;
}
- if ((staticFlow.getNode().equals(node))
- && (staticFlow.getPortGroup().equals(config.getName()))) {
+ if (staticFlow.getNode().equals(node) && staticFlow.getPortGroup().equals(config.getName())) {
for (Short port : data.getPorts()) {
- FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow,
- config.getName(), port);
+ FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port);
removeStaticFlow(derivedFlow);
}
}
}
@Override
- public void portGroupChanged(PortGroupConfig config,
- Map<Node, PortGroup> data, boolean add) {
+ public void portGroupChanged(PortGroupConfig config, Map<Node, PortGroup> data, boolean add) {
log.info("PortGroup Changed for: {} Data: {}", config, portGroupData);
Map<Node, PortGroup> existingData = portGroupData.get(config);
if (existingData != null) {
if (existingPortGroup == null) {
if (add) {
existingData.put(entry.getKey(), entry.getValue());
- addPortGroupFlows(config, entry.getKey(),
- entry.getValue());
+ addPortGroupFlows(config, entry.getKey(), entry.getValue());
}
} else {
if (add) {
- existingPortGroup.getPorts().addAll(
- entry.getValue().getPorts());
- addPortGroupFlows(config, entry.getKey(),
- entry.getValue());
+ existingPortGroup.getPorts().addAll(entry.getValue().getPorts());
+ addPortGroupFlows(config, entry.getKey(), entry.getValue());
} else {
- existingPortGroup.getPorts().removeAll(
- entry.getValue().getPorts());
- removePortGroupFlows(config, entry.getKey(),
- entry.getValue());
+ existingPortGroup.getPorts().removeAll(entry.getValue().getPorts());
+ removePortGroupFlows(config, entry.getKey(), entry.getValue());
}
}
}
}
}
+ @Override
public boolean addPortGroupConfig(String name, String regex, boolean restore) {
PortGroupConfig config = portGroupConfigs.get(name);
- if (config != null)
+ if (config != null) {
return false;
+ }
if ((portGroupProvider == null) && !restore) {
return false;
}
- if ((portGroupProvider != null)
- && (!portGroupProvider.isMatchCriteriaSupported(regex))) {
+ if ((portGroupProvider != null) && (!portGroupProvider.isMatchCriteriaSupported(regex))) {
return false;
}
return true;
}
+ @Override
public boolean delPortGroupConfig(String name) {
PortGroupConfig config = portGroupConfigs.get(name);
if (config == null) {
return;
}
if (portGroupProvider != null) {
- Map<Node, PortGroup> data = portGroupProvider
- .getPortGroupData(config);
+ Map<Node, PortGroup> data = portGroupProvider.getPortGroupData(config);
portGroupData.put(config, data);
}
}
return true;
}
- // Fir PortGroupProvider to use regular Dependency Manager
- /* @SuppressWarnings("rawtypes") */
- /* public void bind(Object arg0, Map arg1) throws Exception { */
- /* if (arg0 instanceof PortGroupProvider) { */
- /* setPortGroupProvider((PortGroupProvider)arg0); */
- /* } */
- /* } */
-
- /* @SuppressWarnings("rawtypes") */
- /* @Override */
- /* public void unbind(Object arg0, Map arg1) throws Exception { */
- /* if (arg0 instanceof PortGroupProvider) { */
- /* portGroupProvider = null; */
- /* } */
- /* } */
-
public void setIContainer(IContainer s) {
this.container = s;
}
}
}
+ @Override
public PortGroupProvider getPortGroupProvider() {
return portGroupProvider;
}
*
*/
void init() {
- frmAware = Collections
- .synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
- frmFileName = GlobalConstants.STARTUPHOME.toString()
- + "frm_staticflows_" + this.getContainerName() + ".conf";
- portGroupFileName = GlobalConstants.STARTUPHOME.toString()
- + "portgroup_" + this.getContainerName() + ".conf";
+ frmAware = Collections.synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
+ frmFileName = GlobalConstants.STARTUPHOME.toString() + "frm_staticflows_" + this.getContainerName() + ".conf";
+ portGroupFileName = GlobalConstants.STARTUPHOME.toString() + "portgroup_" + this.getContainerName() + ".conf";
inContainerMode = false;
if (staticFlowsOrdinal.size() == 0) {
staticFlowsOrdinal.put(0, Integer.valueOf(0));
}
+
+ pendingEvents = new LinkedBlockingQueue<FRMEvent>();
+
+ // Initialize the event handler thread
+ frmEventHandler = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ while (!stopping) {
+ try {
+ FRMEvent event = pendingEvents.take();
+ if (event == null) {
+ log.warn("Dequeued null event");
+ continue;
+ }
+ if (event instanceof NodeUpdateEvent) {
+ NodeUpdateEvent update = (NodeUpdateEvent) event;
+ Node node = update.getNode();
+ switch (update.getUpdateType()) {
+ case ADDED:
+ addStaticFlowsToSwitch(node);
+ break;
+ case REMOVED:
+ cleanDatabaseForNode(node);
+ updateStaticFlowConfigsOnNodeDown(node);
+ break;
+ default:
+ }
+ } else if (event instanceof ErrorReportedEvent) {
+ ErrorReportedEvent errEvent = (ErrorReportedEvent) event;
+ processErrorEvent(errEvent);
+ } else {
+ log.warn("Dequeued unknown event {}", event.getClass().getSimpleName());
+ }
+ } catch (InterruptedException e) {
+ log.warn("FRM EventHandler thread interrupted", e);
+ }
+ }
+ }
+ }, "FRM EventHandler Collector");
}
/**
*
*/
void destroy() {
- destroyCaches();
}
/**
*
*/
void start() {
+ // Initialize graceful stop flag
+ stopping = false;
+
+ // Start event handler thread
+ frmEventHandler.start();
+
/*
* Read startup and build database if we have not already gotten the
* configurations synced from another node
*
*/
void stop() {
+ // Set graceful stop flag
+ stopping = true;
}
public void setFlowProgrammerService(IFlowProgrammerService service) {
}
@Override
- public void tagUpdated(String containerName, Node n, short oldTag,
- short newTag, UpdateType t) {
+ public void tagUpdated(String containerName, Node n, short oldTag, short newTag, UpdateType t) {
}
@Override
- public void containerFlowUpdated(String containerName,
- ContainerFlow previousFlow, ContainerFlow currentFlow, UpdateType t) {
+ public void containerFlowUpdated(String containerName, ContainerFlow previousFlow, ContainerFlow currentFlow,
+ UpdateType t) {
/*
* Whether it is an addition or removal, we have to recompute the merged
* flows entries taking into account all the current container flows
}
@Override
- public void nodeConnectorUpdated(String containerName, NodeConnector p,
- UpdateType t) {
+ public void nodeConnectorUpdated(String containerName, NodeConnector p, UpdateType t) {
// No action
}
updateStaticFlowConfigsOnContainerModeChange(update);
}
+ protected abstract class FRMEvent {
+
+ }
+
+ private class NodeUpdateEvent extends FRMEvent {
+ private final Node node;
+ private final UpdateType update;
+
+ public NodeUpdateEvent(UpdateType update, Node node) {
+ this.update = update;
+ this.node = node;
+ }
+
+ public UpdateType getUpdateType() {
+ return update;
+ }
+
+ public Node getNode() {
+ return node;
+ }
+ }
+
+ private class ErrorReportedEvent extends FRMEvent {
+ private final long rid;
+ private final Node node;
+ private final Object error;
+
+ public ErrorReportedEvent(long rid, Node node, Object error) {
+ this.rid = rid;
+ this.node = node;
+ this.error = error;
+ }
+
+ public long getRequestId() {
+ return rid;
+ }
+
+ public Object getError() {
+ return error;
+ }
+
+ public Node getNode() {
+ return node;
+ }
+ }
+
/*
* OSGI COMMANDS
*/
ci.println(this.programmer.addFlow(node, getSampleFlow(node)));
}
- public void _frmremoveflow(CommandInterpreter ci)
- throws UnknownHostException {
+ public void _frmremoveflow(CommandInterpreter ci) throws UnknownHostException {
Node node = null;
String nodeId = ci.nextArgument();
if (nodeId == null) {
}
private Flow getSampleFlow(Node node) throws UnknownHostException {
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- NodeConnector oport = NodeConnectorCreator.createOFNodeConnector(
- (short) 30, node);
- byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ NodeConnector oport = NodeConnectorCreator.createOFNodeConnector((short) 30, node);
+ byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
InetAddress srcIP = InetAddress.getByName("172.28.30.50");
InetAddress dstIP = InetAddress.getByName("171.71.9.52");
InetAddress ipMask = InetAddress.getByName("255.255.255.0");
}
public void _frmNodeFlows(CommandInterpreter ci) {
+ String nodeId = ci.nextArgument();
+ Node node = Node.fromString(nodeId);
+ if (node == null) {
+ ci.println("frmNodeFlows <node> [verbose]");
+ return;
+ }
boolean verbose = false;
String verboseCheck = ci.nextArgument();
if (verboseCheck != null) {
verbose = verboseCheck.equals("true");
}
+ if (!nodeFlows.containsKey(node)) {
+ return;
+ }
// Dump per node database
- for (Entry<Node, Set<FlowEntryInstall>> entry : this.nodeFlows
- .entrySet()) {
- Node node = entry.getKey();
- for (FlowEntryInstall flow : entry.getValue()) {
- if (!verbose) {
- ci.println(node + " " + flow.getFlowName());
- } else {
- ci.println(node + " " + flow.toString());
- }
+ for (FlowEntryInstall entry : nodeFlows.get(node)) {
+ if (!verbose) {
+ ci.println(node + " " + installedSwView.get(entry).getFlowName());
+ } else {
+ ci.println(node + " " + installedSwView.get(entry).toString());
}
}
}
public void _frmGroupFlows(CommandInterpreter ci) {
+ String group = ci.nextArgument();
+ if (group == null) {
+ ci.println("frmGroupFlows <group> [verbose]");
+ return;
+ }
boolean verbose = false;
String verboseCheck = ci.nextArgument();
if (verboseCheck != null) {
verbose = verboseCheck.equalsIgnoreCase("true");
}
+ if (!groupFlows.containsKey(group)) {
+ return;
+ }
// Dump per node database
- for (Entry<String, Set<FlowEntryInstall>> entry : this.groupFlows
- .entrySet()) {
- String group = entry.getKey();
- ci.println("Group " + group + ":");
- for (FlowEntryInstall flow : entry.getValue()) {
- if (!verbose) {
- ci.println(flow.getNode() + " " + flow.getFlowName());
- } else {
- ci.println(flow.getNode() + " " + flow.toString());
- }
+ ci.println("Group " + group + ":\n");
+ for (FlowEntryInstall flowEntry : groupFlows.get(group)) {
+ if (!verbose) {
+ ci.println(flowEntry.getNode() + " " + flowEntry.getFlowName());
+ } else {
+ ci.println(flowEntry.getNode() + " " + flowEntry.toString());
}
}
}
@Override
public void flowRemoved(Node node, Flow flow) {
log.trace("Received flow removed notification on {} for {}", node, flow);
- // For flow entry identification, only match and priority matter
- FlowEntry toFind = new FlowEntry("any", "any", flow, node);
- FlowEntryInstall installedEntry = this.findMatch(toFind, false);
+
+ // For flow entry identification, only node, match and priority matter
+ FlowEntryInstall test = new FlowEntryInstall(new FlowEntry("","",flow, node), null);
+ FlowEntryInstall installedEntry = this.installedSwView.get(test);
if (installedEntry == null) {
- log.trace("Entry is not know to us");
+ log.trace("Entry is not known to us");
return;
}
// Update Static flow status
+ Integer key = 0;
+ FlowConfig target = null;
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig conf = entry.getValue();
if (conf.isByNameAndNodeIdEqual(installedEntry.getFlowName(), node)) {
- // Update Configuration database
- conf.toggleStatus();
+ key = entry.getKey();
+ target = conf;
break;
}
}
+ if (target != null) {
+ // Update Configuration database
+ target.toggleInstallation();
+ target.setStatus(SUCCESS);
+ staticFlows.put(key, target);
+ }
+
// Update software views
this.updateLocalDatabase(installedEntry, false);
}
@Override
- public synchronized void flowErrorReported(Node node, long rid, Object err) {
- log.trace("Got error {} for message rid {} from node {}",
- new Object[] {err, rid, node });
+ public void flowErrorReported(Node node, long rid, Object err) {
+ log.trace("Got error {} for message rid {} from node {}", new Object[] { err, rid, node });
+ pendingEvents.offer(new ErrorReportedEvent(rid, node, err));
+ }
+
+ private void processErrorEvent(ErrorReportedEvent event) {
+ Node node = event.getNode();
+ long rid = event.getRequestId();
+ Object error = event.getError();
+ String errorString = (error == null) ? "Not provided" : error.toString();
/*
- * If this was for a flow install, remove the corresponding entry
- * from the software view. If it was a Looking for the rid going through the
- * software database.
- * TODO: A more efficient rid <-> FlowEntryInstall mapping will
- * have to be added in future
+ * If this was for a flow install, remove the corresponding entry from
+ * the software view. If it was a Looking for the rid going through the
+ * software database. TODO: A more efficient rid <-> FlowEntryInstall
+ * mapping will have to be added in future
*/
- Set<FlowEntryInstall> entries = nodeFlows.get(node);
- if (entries != null) {
- FlowEntryInstall target = null;
- for (FlowEntryInstall entry : entries) {
- if (entry.getRequestId() == rid) {
- target = entry;
- break;
- }
- }
- if (target != null) {
- // This was a flow install, update database
- this.updateLocalDatabase(target, false);
+ FlowEntryInstall target = null;
+ for (FlowEntryInstall index : nodeFlows.get(node)) {
+ FlowEntryInstall entry = installedSwView.get(index);
+ if (entry.getRequestId() == rid) {
+ target = entry;
+ break;
}
}
+ if (target != null) {
+ // This was a flow install, update database
+ this.updateLocalDatabase(target, false);
+ }
// Notify listeners
if (frmAware != null) {
synchronized (frmAware) {
for (IForwardingRulesManagerAware frma : frmAware) {
try {
- frma.requestFailed(rid, err.toString());
+ frma.requestFailed(rid, errorString);
} catch (Exception e) {
log.warn("Failed to notify {}", frma);
}
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.osgi.framework.ServiceReference;
import org.opendaylight.controller.sal.action.Action;
import org.opendaylight.controller.sal.action.Drop;
import org.opendaylight.controller.sal.core.Node;
-import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
import org.opendaylight.controller.sal.match.Match;
import org.opendaylight.controller.sal.match.MatchType;
-import org.opendaylight.controller.sal.reader.FlowOnNode;
-import org.opendaylight.controller.sal.reader.NodeConnectorStatistics;
-import org.opendaylight.controller.sal.reader.NodeDescription;
import org.opendaylight.controller.sal.utils.NodeCreator;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
assertNotNull(bc);
boolean debugit = false;
Bundle b[] = bc.getBundles();
- for (int i = 0; i < b.length; i++) {
- int state = b[i].getState();
+ for (Bundle element : b) {
+ int state = element.getState();
if (state != Bundle.ACTIVE && state != Bundle.RESOLVED) {
- log.debug("Bundle:" + b[i].getSymbolicName() + " state:"
+ log.debug("Bundle:" + element.getSymbolicName() + " state:"
+ stateToString(state));
debugit = true;
}
package org.opendaylight.controller.hosttracker.hostAware;
+import java.io.Serializable;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
@XmlRootElement(name = "host")
@XmlAccessorType(XmlAccessType.NONE)
-public class HostNodeConnector extends Host {
+public class HostNodeConnector extends Host implements Serializable {
private static final long serialVersionUID = 1L;
@XmlElement
private NodeConnector nodeConnector;
// only 1 switch state listener
private ISwitchStateListener switchStateListener;
private AtomicInteger switchInstanceNumber;
+ private int MAXQUEUESIZE = 50000;
/*
* this thread monitors the switchEvents queue for new incoming events from
public void init() {
logger.debug("Initializing!");
this.switches = new ConcurrentHashMap<Long, ISwitch>();
- this.switchEvents = new LinkedBlockingQueue<SwitchEvent>();
+ this.switchEvents = new LinkedBlockingQueue<SwitchEvent>(MAXQUEUESIZE);
this.messageListeners = new ConcurrentHashMap<OFType, IMessageListener>();
this.switchStateListener = null;
this.switchInstanceNumber = new AtomicInteger(0);
running = true;
while (running) {
try {
- if (!transmitQ.isEmpty()) {
+ while (!transmitQ.isEmpty()) {
PriorityMessage pmsg = transmitQ.poll();
msgReadWriteService.asyncSend(pmsg.msg);
logger.trace("Message sent: {}", pmsg);
package org.opendaylight.controller.protocol_plugin.openflow.internal;
+import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
/**
* The class describes neighbor discovery service for an OpenFlow network.
*/
-public class DiscoveryService implements IInventoryShimExternalListener,
- IDataPacketListen, IContainerListener, CommandProvider {
- private static Logger logger = LoggerFactory
- .getLogger(DiscoveryService.class);
+public class DiscoveryService implements IInventoryShimExternalListener, IDataPacketListen, IContainerListener,
+ CommandProvider {
+ private static Logger logger = LoggerFactory.getLogger(DiscoveryService.class);
private IController controller = null;
private IDiscoveryListener discoveryListener = null;
private IInventoryProvider inventoryProvider = null;
private IDataPacketMux iDataPacketMux = null;
-
- private List<NodeConnector> readyListHi = null; // newly added ports go into
- // this list and will be
- // served first
- private List<NodeConnector> readyListLo = null; // come here after served at
- // least once
- private List<NodeConnector> waitingList = null; // staging area during quiet
- // period
- private ConcurrentMap<NodeConnector, Integer> pendingMap = null;// wait for
- // response
- // back
- private ConcurrentMap<NodeConnector, Edge> edgeMap = null; // openflow edges
- // keyed by head
- // connector
- private ConcurrentMap<NodeConnector, Integer> agingMap = null; // aging
- // entries
- // keyed by
- // edge port
- private ConcurrentMap<NodeConnector, Edge> prodMap = null; // production
- // edges keyed by
- // edge port
-
- private Timer discoveryTimer; // discovery timer
- private DiscoveryTimerTask discoveryTimerTask; // timer task
+ // Newly added ports go into this list and will be served first
+ private List<NodeConnector> readyListHi = null;
+ // Come here after served at least once
+ private List<NodeConnector> readyListLo = null;
+ // Staging area during quiet period
+ private List<NodeConnector> waitingList = null;
+ // Wait for next discovery packet. The map contains the time elapsed since
+ // the last received LLDP frame on each node connector
+ private ConcurrentMap<NodeConnector, Integer> pendingMap = null;
+ // openflow edges keyed by head connector
+ private ConcurrentMap<NodeConnector, Edge> edgeMap = null;
+ // Aging entries keyed by head edge connector
+ private ConcurrentMap<NodeConnector, Integer> agingMap = null;
+ // Production edges keyed by head edge connector
+ private ConcurrentMap<NodeConnector, Edge> prodMap = null;
+
+ private Timer discoveryTimer;
+ private DiscoveryTimerTask discoveryTimerTask;
private long discoveryTimerTick = 1L * 1000; // per tick in msec
private int discoveryTimerTickCount = 0; // main tick counter
- private int discoveryBatchMaxPorts = 500; // max # of ports handled in one
- // batch
- private int discoveryBatchRestartTicks = getDiscoveryInterval(); // periodically
- // restart
- // batching
- // process
+ // Max # of ports handled in one batch
+ private int discoveryBatchMaxPorts = 500;
+ // Periodically restart batching process
+ private int discoveryBatchRestartTicks = getDiscoveryInterval();
private int discoveryBatchPausePeriod = 5; // pause for few secs
- private int discoveryBatchPauseTicks = discoveryBatchRestartTicks
- - discoveryBatchPausePeriod; // pause after this point
- private int discoveryRetry = getDiscoveryRetry(); // number of retries after
- // initial timeout
+ // Pause after this point
+ private int discoveryBatchPauseTicks = discoveryBatchRestartTicks - discoveryBatchPausePeriod;
+ // Number of retries after initial timeout
+ private int discoveryRetry = getDiscoveryRetry();
private int discoveryTimeoutTicks = getDiscoveryTimeout(); // timeout in sec
private int discoveryAgeoutTicks = 120; // age out 2 min
- private int discoveryConsistencyCheckMultiple = 2; // multiple of
- // discoveryBatchRestartTicks
- private int discoveryConsistencyCheckTickCount = discoveryBatchPauseTicks; // CC
- // tick
- // counter
- private int discoveryConsistencyCheckCallingTimes = 0; // # of times CC gets
- // called
- private int discoveryConsistencyCheckCorrected = 0; // # of cases CC
- // corrected
- private boolean discoveryConsistencyCheckEnabled = true;// enable or disable
- // CC
- private boolean discoveryAgingEnabled = true; // enable or disable aging
- private boolean discoverySnoopingEnabled = true; // global flag to enable or
- // disable LLDP snooping
- private List<NodeConnector> discoverySnoopingDisableList; // the list of
- // ports that will
- // not do LLDP
- // snooping
+ // multiple of discoveryBatchRestartTicks
+ private int discoveryConsistencyCheckMultiple = 2;
+ // CC tick counter
+ private int discoveryConsistencyCheckTickCount = discoveryBatchPauseTicks;
+ // # of times CC gets called
+ private int discoveryConsistencyCheckCallingTimes = 0;
+ // # of cases CC corrected
+ private int discoveryConsistencyCheckCorrected = 0;
+ // Enable or disable CC
+ private boolean discoveryConsistencyCheckEnabled = true;
+ // Enable or disable aging
+ private boolean discoveryAgingEnabled = true;
+ // Global flag to enable or disable LLDP snooping
+ private boolean discoverySnoopingEnabled = true;
+ // The list of ports that will not do LLDP snooping
+ private List<NodeConnector> discoverySnoopingDisableList;
private BlockingQueue<NodeConnector> transmitQ;
private Thread transmitThread;
private Boolean throttling = false; // if true, no more batching.
this.transmitQ = transmitQ;
}
+ @Override
public void run() {
while (true) {
try {
nodeConnector = null;
} catch (InterruptedException e1) {
logger.warn("DiscoveryTransmit interupted", e1.getMessage());
- if (shuttingDown)
+ if (shuttingDown) {
return;
+ }
} catch (Exception e2) {
logger.error("", e2);
}
}
class DiscoveryTimerTask extends TimerTask {
+ @Override
public void run() {
checkTimeout();
checkAging();
}
private RawPacket createDiscoveryPacket(NodeConnector nodeConnector) {
- String nodeId = HexEncode.longToHexString((Long) nodeConnector
- .getNode().getID());
+ String nodeId = HexEncode.longToHexString((Long) nodeConnector.getNode().getID());
// Create LLDP ChassisID TLV
byte[] cidValue = LLDPTLV.createChassisIDTLVValue(nodeId);
- chassisIdTlv.setType((byte) LLDPTLV.TLVType.ChassisID.getValue())
- .setLength((short) cidValue.length).setValue(cidValue);
+ chassisIdTlv.setType(LLDPTLV.TLVType.ChassisID.getValue()).setLength((short) cidValue.length)
+ .setValue(cidValue);
// Create LLDP PortID TLV
String portId = nodeConnector.getNodeConnectorIDString();
byte[] pidValue = LLDPTLV.createPortIDTLVValue(portId);
- portIdTlv.setType((byte) LLDPTLV.TLVType.PortID.getValue())
- .setLength((short) pidValue.length).setValue(pidValue);
+ portIdTlv.setType(LLDPTLV.TLVType.PortID.getValue()).setLength((short) pidValue.length).setValue(pidValue);
// Create LLDP Custom TLV
- byte[] customValue = LLDPTLV.createCustomTLVValue(nodeConnector
- .toString());
- customTlv.setType((byte) LLDPTLV.TLVType.Custom.getValue())
- .setLength((short) customValue.length).setValue(customValue);
+ byte[] customValue = LLDPTLV.createCustomTLVValue(nodeConnector.toString());
+ customTlv.setType(LLDPTLV.TLVType.Custom.getValue()).setLength((short) customValue.length)
+ .setValue(customValue);
// Create LLDP Custom Option list
List<LLDPTLV> customList = new ArrayList<LLDPTLV>();
// Create discovery pkt
LLDP discoveryPkt = new LLDP();
- discoveryPkt.setChassisId(chassisIdTlv).setPortId(portIdTlv)
- .setTtl(ttlTlv).setOptionalTLVList(customList);
+ discoveryPkt.setChassisId(chassisIdTlv).setPortId(portIdTlv).setTtl(ttlTlv).setOptionalTLVList(customList);
RawPacket rawPkt = null;
try {
// Create ethernet pkt
- byte[] sourceMac = getSouceMACFromNodeID(nodeId);
+ byte[] sourceMac = getSourceMACFromNodeID(nodeId);
Ethernet ethPkt = new Ethernet();
- ethPkt.setSourceMACAddress(sourceMac)
- .setDestinationMACAddress(LLDP.LLDPMulticastMac)
- .setEtherType(EtherTypes.LLDP.shortValue())
- .setPayload(discoveryPkt);
+ ethPkt.setSourceMACAddress(sourceMac).setDestinationMACAddress(LLDP.LLDPMulticastMac)
+ .setEtherType(EtherTypes.LLDP.shortValue()).setPayload(discoveryPkt);
byte[] data = ethPkt.serialize();
rawPkt = new RawPacket(data);
rawPkt.setOutgoingNodeConnector(nodeConnector);
} catch (ConstructionException cex) {
- logger.warn("RawPacket creation caught exception {}",
- cex.getMessage());
+ logger.warn("RawPacket creation caught exception {}", cex.getMessage());
} catch (Exception e) {
logger.error("Failed to serialize the LLDP packet: " + e);
}
return rawPkt;
}
- private void sendDiscoveryPacket(NodeConnector nodeConnector,
- RawPacket outPkt) {
+ private void sendDiscoveryPacket(NodeConnector nodeConnector, RawPacket outPkt) {
if (nodeConnector == null) {
logger.debug("Can not send discovery packet out since nodeConnector is null");
return;
ISwitch sw = controller.getSwitches().get(sid);
if (sw == null) {
- logger.debug(
- "Can not send discovery packet out since switch {} is null",
- sid);
+ logger.debug("Can not send discovery packet out since switch {} is null", sid);
return;
}
if (!sw.isOperational()) {
- logger.debug(
- "Can not send discovery packet out since switch {} is not operational",
- sw);
+ logger.debug("Can not send discovery packet out since switch {} is not operational", sw);
return;
}
return PacketResult.IGNORED;
}
- if (((Short) inPkt.getIncomingNodeConnector().getID())
- .equals(NodeConnector.SPECIALNODECONNECTORID)) {
+ if (((Short) inPkt.getIncomingNodeConnector().getID()).equals(NodeConnector.SPECIALNODECONNECTORID)) {
logger.trace("Ignoring ethernet packet received on special port: "
+ inPkt.getIncomingNodeConnector().toString());
return PacketResult.IGNORED;
try {
ethPkt.deserialize(data, 0, data.length * NetUtils.NumBitsInAByte);
} catch (Exception e) {
- logger.warn("Failed to decode LLDP packet from {}: {}",
- inPkt.getIncomingNodeConnector(), e);
+ logger.warn("Failed to decode LLDP packet from {}: {}", inPkt.getIncomingNodeConnector(), e);
return PacketResult.IGNORED;
}
NodeConnector dst = inPkt.getIncomingNodeConnector();
if (isEnabled(dst)) {
if (!processDiscoveryPacket(dst, ethPkt)) {
- /* Snoop the discovery pkt if not generated from us */
+ // Snoop the discovery pkt if not generated from us
snoopDiscoveryPacket(dst, ethPkt);
}
return PacketResult.CONSUME;
* Snoop incoming discovery frames generated by the production network
* neighbor switch
*/
- private void snoopDiscoveryPacket(NodeConnector dstNodeConnector,
- Ethernet ethPkt) {
- if (!this.discoverySnoopingEnabled
- || discoverySnoopingDisableList.contains(dstNodeConnector)) {
- logger.trace(
- "Discarded received discovery packet on {} since snooping is turned off",
- dstNodeConnector);
+ private void snoopDiscoveryPacket(NodeConnector dstNodeConnector, Ethernet ethPkt) {
+ if (!this.discoverySnoopingEnabled || discoverySnoopingDisableList.contains(dstNodeConnector)) {
+ logger.trace("Discarded received discovery packet on {} since snooping is turned off", dstNodeConnector);
return;
}
LLDP lldp = (LLDP) ethPkt.getPayload();
try {
- String nodeId = LLDPTLV.getHexStringValue(lldp.getChassisId()
- .getValue(), lldp.getChassisId().getLength());
- String portId = LLDPTLV.getStringValue(lldp.getPortId().getValue(),
- lldp.getPortId().getLength());
+ String nodeId = LLDPTLV.getHexStringValue(lldp.getChassisId().getValue(), lldp.getChassisId().getLength());
+ String portId = LLDPTLV.getStringValue(lldp.getPortId().getValue(), lldp.getPortId().getLength());
byte[] systemNameBytes = null;
// get system name if present in the LLDP pkt
for (LLDPTLV lldptlv : lldp.getOptionalTLVList()) {
break;
}
}
- String nodeName = (systemNameBytes == null) ? nodeId : new String(
- systemNameBytes);
+ String nodeName = (systemNameBytes == null) ? nodeId
+ : new String(systemNameBytes, Charset.defaultCharset());
Node srcNode = new Node(Node.NodeIDType.PRODUCTION, nodeName);
- NodeConnector srcNodeConnector = NodeConnectorCreator
- .createNodeConnector(
- NodeConnector.NodeConnectorIDType.PRODUCTION,
- portId, srcNode);
+ NodeConnector srcNodeConnector = NodeConnectorCreator.createNodeConnector(
+ NodeConnector.NodeConnectorIDType.PRODUCTION, portId, srcNode);
Edge edge = null;
Set<Property> props = null;
*
* @return true if it's a success
*/
- private boolean processDiscoveryPacket(NodeConnector dstNodeConnector,
- Ethernet ethPkt) {
+ private boolean processDiscoveryPacket(NodeConnector dstNodeConnector, Ethernet ethPkt) {
if ((dstNodeConnector == null) || (ethPkt == null)) {
logger.trace("Ignoring processing of discovery packet: Null node connector or packet");
return false;
}
- logger.trace("Handle discovery packet {} from {}", ethPkt,
- dstNodeConnector);
+ logger.trace("Handle discovery packet {} from {}", ethPkt, dstNodeConnector);
LLDP lldp = (LLDP) ethPkt.getPayload();
List<LLDPTLV> optionalTLVList = lldp.getOptionalTLVList();
if (optionalTLVList == null) {
- logger.info("The discovery packet with null custom option from {}",
- dstNodeConnector);
+ logger.info("The discovery packet with null custom option from {}", dstNodeConnector);
return false;
}
NodeConnector srcNodeConnector = null;
for (LLDPTLV lldptlv : lldp.getOptionalTLVList()) {
if (lldptlv.getType() == LLDPTLV.TLVType.Custom.getValue()) {
- String ncString = LLDPTLV.getCustomString(lldptlv.getValue(),
- lldptlv.getLength());
+ String ncString = LLDPTLV.getCustomString(lldptlv.getValue(), lldptlv.getLength());
srcNodeConnector = NodeConnector.fromString(ncString);
if (srcNodeConnector != null) {
srcNode = srcNodeConnector.getNode();
- /* Check if it's expected */
- if (isTracked(srcNodeConnector)) {
- break;
- } else {
- srcNode = null;
- srcNodeConnector = null;
- }
}
}
}
if ((srcNode == null) || (srcNodeConnector == null)) {
- logger.trace(
- "Received non-controller generated discovery packet from {}",
- dstNodeConnector);
+ logger.trace("Received non-controller generated discovery packet from {}", dstNodeConnector);
return false;
}
logger.error("Caught exception ", e);
}
addEdge(edge, props);
+ pendingMap.put(dstNodeConnector, 0);
logger.trace("Received discovery packet for Edge {}", edge);
return null;
}
- Map<NodeConnector, Map<String, Property>> props = inventoryProvider
- .getNodeConnectorProps(false);
+ Map<NodeConnector, Map<String, Property>> props = inventoryProvider.getNodeConnectorProps(false);
if (props == null) {
return null;
}
return null;
}
- Property prop = (Property) propMap.get(propName);
+ Property prop = propMap.get(propName);
return prop;
}
Config config = (Config) getProp(nodeConnector, Config.ConfigPropName);
State state = (State) getProp(nodeConnector, State.StatePropName);
- return ((config != null) && (config.getValue() == Config.ADMIN_UP)
- && (state != null) && (state.getValue() == State.EDGE_UP));
+ return ((config != null) && (config.getValue() == Config.ADMIN_UP) && (state != null) && (state.getValue() == State.EDGE_UP));
}
private boolean isTracked(NodeConnector nodeConnector) {
private void addDiscovery(Node node) {
Map<Long, ISwitch> switches = controller.getSwitches();
- ISwitch sw = switches.get((Long) node.getID());
+ ISwitch sw = switches.get(node.getID());
List<OFPhysicalPort> ports = sw.getEnabledPorts();
if (ports == null) {
return;
}
for (OFPhysicalPort port : ports) {
- NodeConnector nodeConnector = NodeConnectorCreator
- .createOFNodeConnector(port.getPortNumber(), node);
+ NodeConnector nodeConnector = NodeConnectorCreator.createOFNodeConnector(port.getPortNumber(), node);
if (!readyListHi.contains(nodeConnector)) {
readyListHi.add(nodeConnector);
}
readyListHi.add(nodeConnector);
}
- private Set<NodeConnector> getRemoveSet(Collection<NodeConnector> c,
- Node node) {
+ private Set<NodeConnector> getRemoveSet(Collection<NodeConnector> c, Node node) {
Set<NodeConnector> removeSet = new HashSet<NodeConnector>();
if (c == null) {
return removeSet;
private void checkTimeout() {
Set<NodeConnector> removeSet = new HashSet<NodeConnector>();
Set<NodeConnector> retrySet = new HashSet<NodeConnector>();
- int sentCount;
+ int ticks;
Set<NodeConnector> pendingSet = pendingMap.keySet();
if (pendingSet != null) {
for (NodeConnector nodeConnector : pendingSet) {
- sentCount = pendingMap.get(nodeConnector);
- pendingMap.put(nodeConnector, ++sentCount);
- if (sentCount > getDiscoveryFinalTimeoutInterval()) {
+ ticks = pendingMap.get(nodeConnector);
+ pendingMap.put(nodeConnector, ++ticks);
+ if (ticks > getDiscoveryFinalTimeoutInterval()) {
// timeout the edge
removeSet.add(nodeConnector);
logger.trace("Discovery timeout {}", nodeConnector);
- } else if (sentCount % discoveryTimeoutTicks == 0) {
+ } else if (ticks % discoveryTimeoutTicks == 0) {
retrySet.add(nodeConnector);
}
}
}
Set<NodeConnector> removeSet = new HashSet<NodeConnector>();
- int sentCount;
+ int ticks;
Set<NodeConnector> agingSet = agingMap.keySet();
if (agingSet != null) {
for (NodeConnector nodeConnector : agingSet) {
- sentCount = agingMap.get(nodeConnector);
- agingMap.put(nodeConnector, ++sentCount);
- if (sentCount > discoveryAgeoutTicks) {
+ ticks = agingMap.get(nodeConnector);
+ agingMap.put(nodeConnector, ++ticks);
+ if (ticks > discoveryAgeoutTicks) {
// age out the edge
removeSet.add(nodeConnector);
logger.trace("Discovery age out {}", nodeConnector);
private void doDiscovery() {
if (++discoveryTimerTickCount <= discoveryBatchPauseTicks) {
for (NodeConnector nodeConnector : getWorkingSet()) {
- pendingMap.put(nodeConnector, 0);
transmitQ.add(nodeConnector);
}
} else if (discoveryTimerTickCount >= discoveryBatchRestartTicks) {
discoveryTimerTickCount = 0;
for (NodeConnector nodeConnector : waitingList) {
- if (!readyListLo.contains(nodeConnector))
+ if (!readyListLo.contains(nodeConnector)) {
readyListLo.add(nodeConnector);
+ }
}
waitingList.removeAll(readyListLo);
}
return;
}
- if (++discoveryConsistencyCheckTickCount
- % getDiscoveryConsistencyCheckInterval() != 0) {
+ if (++discoveryConsistencyCheckTickCount % getDiscoveryConsistencyCheckInterval() != 0) {
return;
}
if (!isEnabled(nodeConnector)) {
removeSet.add(nodeConnector);
discoveryConsistencyCheckCorrected++;
- logger.debug("ConsistencyChecker: remove disabled {}",
- nodeConnector);
+ logger.debug("ConsistencyChecker: remove disabled {}", nodeConnector);
continue;
}
if (!isTracked(nodeConnector)) {
waitingList.add(nodeConnector);
discoveryConsistencyCheckCorrected++;
- logger.debug("ConsistencyChecker: add back untracked {}",
- nodeConnector);
+ logger.debug("ConsistencyChecker: add back untracked {}", nodeConnector);
continue;
}
}
if (!isEnabled(nodeConnector)) {
removeSet.add(nodeConnector);
discoveryConsistencyCheckCorrected++;
- logger.debug("ConsistencyChecker: remove disabled {}",
- nodeConnector);
+ logger.debug("ConsistencyChecker: remove disabled {}", nodeConnector);
}
}
waitingList.removeAll(removeSet);
for (ISwitch sw : switches.values()) {
for (OFPhysicalPort port : sw.getEnabledPorts()) {
Node node = NodeCreator.createOFNode(sw.getId());
- NodeConnector nodeConnector = NodeConnectorCreator
- .createOFNodeConnector(port.getPortNumber(), node);
+ NodeConnector nodeConnector = NodeConnectorCreator.createOFNodeConnector(port.getPortNumber(), node);
if (!isTracked(nodeConnector)) {
waitingList.add(nodeConnector);
discoveryConsistencyCheckCorrected++;
- logger.debug("ConsistencyChecker: add back untracked {}",
- nodeConnector);
+ logger.debug("ConsistencyChecker: add back untracked {}", nodeConnector);
}
}
}
NodeConnector src = edge.getTailNodeConnector();
if (!src.getType().equals(NodeConnector.NodeConnectorIDType.PRODUCTION)) {
pendingMap.remove(src);
- if (!waitingList.contains(src)) {
- waitingList.add(src);
- }
} else {
NodeConnector dst = edge.getHeadNodeConnector();
agingMap.put(dst, 0);
}
- // notify routeEngine
+ // Notify the discovery listener of the added edge
updateEdge(edge, UpdateType.ADDED, props);
logger.trace("Add edge {}", edge);
}
/* Do not update in case there is an existing OpenFlow link */
if (edgeMap.get(edgePort) != null) {
- logger.trace(
- "Discarded edge {} since there is an existing OF link {}",
- edge, edgeMap.get(edgePort));
+ logger.trace("Discarded edge {} since there is an existing OF link {}", edge, edgeMap.get(edgePort));
return;
}
this.discoveryListener.notifyEdge(edge, type, props);
- NodeConnector src = edge.getTailNodeConnector(), dst = edge
- .getHeadNodeConnector();
+ NodeConnector src = edge.getTailNodeConnector(), dst = edge.getHeadNodeConnector();
if (!src.getType().equals(NodeConnector.NodeConnectorIDType.PRODUCTION)) {
if (type == UpdateType.ADDED) {
edgeMap.put(dst, edge);
}
}
- private void moreToReadyListHi(NodeConnector nodeConnector) {
+ private void moveToReadyListHi(NodeConnector nodeConnector) {
if (readyListLo.contains(nodeConnector)) {
readyListLo.remove(nodeConnector);
- readyListHi.add(nodeConnector);
} else if (waitingList.contains(nodeConnector)) {
waitingList.remove(nodeConnector);
- readyListHi.add(nodeConnector);
}
+ readyListHi.add(nodeConnector);
}
private void registerWithOSGIConsole() {
- BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass())
- .getBundleContext();
- bundleContext.registerService(CommandProvider.class.getName(), this,
- null);
+ BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass()).getBundleContext();
+ bundleContext.registerService(CommandProvider.class.getName(), this, null);
}
private int getDiscoveryConsistencyCheckInterval() {
}
public void _ppl(CommandInterpreter ci) {
- ci.println("PendingList\n");
- for (NodeConnector nodeConnector : pendingMap.keySet()) {
- if (nodeConnector == null) {
- continue;
- }
- ci.println(nodeConnector);
+ ci.println("pendingMap\n");
+ ci.println(" NodeConnector Last rx LLDP (s)");
+ for (ConcurrentMap.Entry<NodeConnector, Integer> entry: pendingMap.entrySet()) {
+ ci.println(entry.getKey() + "\t\t" + entry.getValue());
}
}
}
ci.println("Interval " + getDiscoveryConsistencyCheckInterval());
ci.println("Multiple " + discoveryConsistencyCheckMultiple);
- ci.println("Number of times called "
- + discoveryConsistencyCheckCallingTimes);
+ ci.println("Number of times called " + discoveryConsistencyCheckCallingTimes);
ci.println("Corrected count " + discoveryConsistencyCheckCorrected);
}
}
public void _psize(CommandInterpreter ci) {
- ci.println("readyListLo size " + readyListLo.size() + "\n"
- + "readyListHi size " + readyListHi.size() + "\n"
- + "waitingList size " + waitingList.size() + "\n"
- + "pendingMap size " + pendingMap.size() + "\n"
- + "edgeMap size " + edgeMap.size() + "\n" + "prodMap size "
- + prodMap.size() + "\n" + "agingMap size " + agingMap.size());
+ ci.println("readyListLo size " + readyListLo.size() + "\n" + "readyListHi size " + readyListHi.size() + "\n"
+ + "waitingList size " + waitingList.size() + "\n" + "pendingMap size " + pendingMap.size() + "\n"
+ + "edgeMap size " + edgeMap.size() + "\n" + "prodMap size " + prodMap.size() + "\n" + "agingMap size "
+ + agingMap.size());
}
public void _page(CommandInterpreter ci) {
public void _sage(CommandInterpreter ci) {
String val = ci.nextArgument();
if (val == null) {
- ci.println("Please enter aging time limit. Current value "
- + this.discoveryAgeoutTicks);
+ ci.println("Please enter aging time limit. Current value " + this.discoveryAgeoutTicks);
return;
}
try {
public void _scc(CommandInterpreter ci) {
String val = ci.nextArgument();
if (val == null) {
- ci.println("Please enter CC multiple. Current multiple "
- + discoveryConsistencyCheckMultiple + " (interval "
- + getDiscoveryConsistencyCheckInterval()
- + ") calling times "
+ ci.println("Please enter CC multiple. Current multiple " + discoveryConsistencyCheckMultiple
+ + " (interval " + getDiscoveryConsistencyCheckInterval() + ") calling times "
+ discoveryConsistencyCheckCallingTimes);
return;
}
NodeConnector nodeConnector = NodeConnector.fromString(val);
if (nodeConnector != null) {
discoverySnoopingDisableList.remove(nodeConnector);
- ci.println("Discovery snooping is locally enabled on port "
- + nodeConnector);
+ ci.println("Discovery snooping is locally enabled on port " + nodeConnector);
} else {
ci.println("Entered invalid NodeConnector " + val);
}
NodeConnector nodeConnector = NodeConnector.fromString(val);
if (nodeConnector != null) {
discoverySnoopingDisableList.add(nodeConnector);
- ci.println("Discovery snooping is locally disabled on port "
- + nodeConnector);
+ ci.println("Discovery snooping is locally disabled on port " + nodeConnector);
} else {
ci.println("Entered invalid NodeConnector " + val);
}
public void _spause(CommandInterpreter ci) {
String val = ci.nextArgument();
- String out = "Please enter pause period less than "
- + discoveryBatchRestartTicks + ". Current pause period is "
- + discoveryBatchPausePeriod + " pause tick is "
- + discoveryBatchPauseTicks + ".";
+ String out = "Please enter pause period less than " + discoveryBatchRestartTicks + ". Current pause period is "
+ + discoveryBatchPausePeriod + " pause tick is " + discoveryBatchPauseTicks + ".";
if (val != null) {
try {
int pause = Integer.parseInt(val);
if (pause < discoveryBatchRestartTicks) {
discoveryBatchPausePeriod = pause;
- discoveryBatchPauseTicks = discoveryBatchRestartTicks
- - discoveryBatchPausePeriod;
+ discoveryBatchPauseTicks = discoveryBatchRestartTicks - discoveryBatchPausePeriod;
return;
}
} catch (Exception e) {
public void _sdi(CommandInterpreter ci) {
String val = ci.nextArgument();
- String out = "Please enter discovery interval greater than "
- + discoveryBatchPausePeriod + ". Current value is "
- + discoveryBatchRestartTicks + ".";
+ String out = "Please enter discovery interval greater than " + discoveryBatchPausePeriod
+ + ". Current value is " + discoveryBatchRestartTicks + ".";
if (val != null) {
try {
int restart = Integer.parseInt(val);
if (restart > discoveryBatchPausePeriod) {
discoveryBatchRestartTicks = restart;
- discoveryBatchPauseTicks = discoveryBatchRestartTicks
- - discoveryBatchPausePeriod;
+ discoveryBatchPauseTicks = discoveryBatchRestartTicks - discoveryBatchPausePeriod;
return;
}
} catch (Exception e) {
public void _sports(CommandInterpreter ci) {
String val = ci.nextArgument();
if (val == null) {
- ci.println("Please enter max ports per batch. Current value is "
- + discoveryBatchMaxPorts);
+ ci.println("Please enter max ports per batch. Current value is " + discoveryBatchMaxPorts);
return;
}
try {
public void _sretry(CommandInterpreter ci) {
String val = ci.nextArgument();
if (val == null) {
- ci.println("Please enter number of retries. Current value is "
- + discoveryRetry);
+ ci.println("Please enter number of retries. Current value is " + discoveryRetry);
return;
}
try {
public void _stm(CommandInterpreter ci) {
String val = ci.nextArgument();
- String out = "Please enter timeout tick value less than "
- + discoveryBatchRestartTicks + ". Current value is "
+ String out = "Please enter timeout tick value less than " + discoveryBatchRestartTicks + ". Current value is "
+ discoveryTimeoutTicks;
if (val != null) {
try {
}
@Override
- public void updateNodeConnector(NodeConnector nodeConnector,
- UpdateType type, Set<Property> props) {
+ public void updateNodeConnector(NodeConnector nodeConnector, UpdateType type, Set<Property> props) {
Config config = null;
State state = null;
boolean enabled = false;
state = (State) prop;
}
}
- enabled = ((config != null) && (config.getValue() == Config.ADMIN_UP)
- && (state != null) && (state.getValue() == State.EDGE_UP));
+ enabled = ((config != null) && (config.getValue() == Config.ADMIN_UP) && (state != null) && (state.getValue() == State.EDGE_UP));
switch (type) {
case ADDED:
}
public void addNode(Node node, Set<Property> props) {
- if (node == null)
+ if (node == null) {
return;
+ }
addDiscovery(node);
}
public void removeNode(Node node) {
- if (node == null)
+ if (node == null) {
return;
+ }
removeDiscovery(node);
}
private void initDiscoveryPacket() {
// Create LLDP ChassisID TLV
chassisIdTlv = new LLDPTLV();
- chassisIdTlv.setType((byte) LLDPTLV.TLVType.ChassisID.getValue());
+ chassisIdTlv.setType(LLDPTLV.TLVType.ChassisID.getValue());
// Create LLDP PortID TLV
portIdTlv = new LLDPTLV();
- portIdTlv.setType((byte) LLDPTLV.TLVType.PortID.getValue());
+ portIdTlv.setType(LLDPTLV.TLVType.PortID.getValue());
// Create LLDP TTL TLV
byte[] ttl = new byte[] { (byte) 0, (byte) 120 };
ttlTlv = new LLDPTLV();
- ttlTlv.setType((byte) LLDPTLV.TLVType.TTL.getValue())
- .setLength((short) ttl.length).setValue(ttl);
+ ttlTlv.setType(LLDPTLV.TLVType.TTL.getValue()).setLength((short) ttl.length).setValue(ttl);
customTlv = new LLDPTLV();
}
*
*/
void start() {
- discoveryTimer.schedule(discoveryTimerTask, discoveryTimerTick,
- discoveryTimerTick);
+ discoveryTimer.schedule(discoveryTimerTask, discoveryTimerTick, discoveryTimerTick);
transmitThread.start();
}
}
@Override
- public void tagUpdated(String containerName, Node n, short oldTag,
- short newTag, UpdateType t) {
+ public void tagUpdated(String containerName, Node n, short oldTag, short newTag, UpdateType t) {
}
@Override
- public void containerFlowUpdated(String containerName,
- ContainerFlow previousFlow, ContainerFlow currentFlow, UpdateType t) {
+ public void containerFlowUpdated(String containerName, ContainerFlow previousFlow, ContainerFlow currentFlow,
+ UpdateType t) {
}
@Override
- public void nodeConnectorUpdated(String containerName, NodeConnector p,
- UpdateType t) {
+ public void nodeConnectorUpdated(String containerName, NodeConnector p, UpdateType t) {
switch (t) {
case ADDED:
- moreToReadyListHi(p);
+ moveToReadyListHi(p);
break;
default:
break;
// do nothing
}
- private byte[] getSouceMACFromNodeID(String nodeId) {
+ private byte[] getSourceMACFromNodeID(String nodeId) {
byte[] cid = HexEncode.bytesFromHexString(nodeId);
byte[] sourceMac = new byte[6];
int pos = cid.length - sourceMac.length;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
package org.opendaylight.controller.sal.action;
+import java.io.Serializable;
+
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.slf4j.LoggerFactory;
/**
- * Represents the generic action to be applied to the matched frame/packet/message
+ * Represents the generic action to be applied to the matched
+ * frame/packet/message
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-@XmlSeeAlso({Controller.class, Drop.class, Flood.class, FloodAll.class, HwPath.class, Loopback.class, Output.class,
- PopVlan.class, PushVlan.class, SetDlDst.class, SetDlSrc.class, SetDlType.class, SetNwDst.class, SetNwSrc.class,
- SetNwTos.class, SetTpDst.class, SetTpSrc.class, SetVlanCfi.class, SetVlanId.class, SetVlanPcp.class, SwPath.class})
-public abstract class Action {
+@XmlSeeAlso({ Controller.class, Drop.class, Flood.class, FloodAll.class, HwPath.class, Loopback.class, Output.class,
+ PopVlan.class, PushVlan.class, SetDlDst.class, SetDlSrc.class, SetDlType.class, SetNwDst.class, SetNwSrc.class,
+ SetNwTos.class, SetTpDst.class, SetTpSrc.class, SetVlanCfi.class, SetVlanId.class, SetVlanPcp.class,
+ SwPath.class })
+public abstract class Action implements Serializable {
+ private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(Action.class);
- private static boolean debug = false; // Enable to find where in the code an invalid assignment is made
+ private static boolean debug = false; // Enable to find where in the code an
+ // invalid assignment is made
@XmlTransient
protected ActionType type;
private transient boolean isValid = true;
/* Dummy constructor for JAXB */
- public Action () {
+ public Action() {
}
/*
- public Action (ActionType type, Object value) {
- this.type = type;
- this.value = value;
- this.isValid = true;
- } */
+ * public Action (ActionType type, Object value) { this.type = type;
+ * this.value = value; this.isValid = true; }
+ */
/**
* Checks if the passed value is in the valid range for this action
}
/**
- * Checks if the passed value is in the valid range for the passed action type
- * This method is used for complex Action types which are
+ * Checks if the passed value is in the valid range for the passed action
+ * type This method is used for complex Action types which are
*
* @param value
* @return boolean
* @return void
*/
private void throwValueException(int value) {
- String error = "Invalid field value assignement. For type: "
- + type.getId() + " Expected: " + type.getRange() + ", Got: 0x"
- + Integer.toHexString(value);
+ String error = "Invalid field value assignement. For type: " + type.getId() + " Expected: " + type.getRange()
+ + ", Got: 0x" + Integer.toHexString(value);
try {
throw new Exception(error);
} catch (Exception e) {
logger.error(e.getMessage());
if (debug) {
- logger.error("",e);
+ logger.error("", e);
}
}
}
/**
* Returns whether the Action is valid or not
*
- * @return boolean
+ * @return boolean
*/
public boolean isValid() {
return isValid;
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Action other = (Action) obj;
- if (type != other.type)
+ if (type != other.type) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* The enumeration of actions supported by the controller
* Each entry has a unique id and the values range for the action element where applicable
- *
- *
- *
*/
public enum ActionType {
DROP("drop", 0, 0),
}
public String getRange() {
- return "[0x" + Long.toHexString(minValue) + "-0x"
- + Long.toHexString(maxValue) + "]";
+ return "[0x" + Long.toHexString(minValue) + "-0x" + Long.toHexString(maxValue) + "]";
}
public boolean takesParameter() {
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represents the action of punting the packet to the controller
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class Controller extends Action {
+ private static final long serialVersionUID = 1L;
public Controller() {
type = ActionType.CONTROLLER;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represent the action of dropping the matched packet
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class Drop extends Action {
+ private static final long serialVersionUID = 1L;
+
public Drop() {
type = ActionType.DROP;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represents the action of flooding the packet out
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class Flood extends Action {
+ private static final long serialVersionUID = 1L;
public Flood() {
type = ActionType.FLOOD;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import javax.xml.bind.annotation.XmlRootElement;
/**
- * Represents the action of flooding the packet out all the physical ports except the input port
- *
- *
- *
+ * Represents the action of flooding the packet out all the physical ports
+ * except the input port
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class FloodAll extends Action {
+ private static final long serialVersionUID = 1L;
public FloodAll() {
type = ActionType.FLOOD_ALL;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import javax.xml.bind.annotation.XmlRootElement;
/**
- * Represents the action of sending the packet to the local hardware path for processing
- *
- *
- *
+ * Represents the action of sending the packet to the local hardware path for
+ * processing
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class HwPath extends Action {
+ private static final long serialVersionUID = 1L;
public HwPath() {
type = ActionType.HW_PATH;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represents the action of looping the packet back the port it came in from
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class Loopback extends Action {
+ private static final long serialVersionUID = 1L;
public Loopback() {
type = ActionType.LOOPBACK;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represents the action of sending the packet out of a physical port
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class Output extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private NodeConnector port;
/* Dummy constructor for JAXB */
- private Output () {
+ @SuppressWarnings("unused")
+ private Output() {
}
public Output(NodeConnector port) {
type = ActionType.OUTPUT;
this.port = port;
- //checkValue(port);
+ // checkValue(port);
}
public NodeConnector getPort() {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Output other = (Output) obj;
if (port == null) {
- if (other.port != null)
+ if (other.port != null) {
return false;
- } else if (!port.equals(other.port))
+ }
+ } else if (!port.equals(other.port)) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Pop vlan action (strip the outermost 802.1q header)
- *
- *
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class PopVlan extends Action {
+ private static final long serialVersionUID = 1L;
public PopVlan() {
type = ActionType.POP_VLAN;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.opendaylight.controller.sal.utils.EtherTypes;
/**
- * Insert a 802.1q (outermost) header action
- * Execute it multiple times to achieve QinQ
- *
- * 802.1q = [TPID(16) + TCI(16)]
- * TCI = [PCP(3) + CFI(1) + VID(12)]
- *
- *
+ * Insert a 802.1q (outermost) header action. Execute it multiple times to
+ * achieve QinQ.
*
+ * 802.1q = [TPID(16) + TCI(16)] TCI = [PCP(3) + CFI(1) + VID(12)]
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class PushVlan extends Action {
+ private static final long serialVersionUID = 1L;
private int tag; // TPID - 16 bits
private int pcp; // PCP - 3 bits
private int cfi; // CFI - 1 bit (drop eligible)
private transient int header; // full 802.1q header [TPID + TCI] - 32 bits
/* Dummy constructor for JAXB */
- private PushVlan () {
+ @SuppressWarnings("unused")
+ private PushVlan() {
}
public PushVlan(int tag, int pcp, int cfi, int vlanId) {
}
private int createHeader() {
- return (tag & 0xffff) << 16 | (pcp & 0x7) << 13 | (cfi & 0x1) << 12
- | (vlanId & 0xfff);
+ return (tag & 0xffff) << 16 | (pcp & 0x7) << 13 | (cfi & 0x1) << 12 | (vlanId & 0xfff);
}
private void runChecks() {
checkValue(tci);
// Run action specific check which cannot be run by parent
- if (tag != EtherTypes.VLANTAGGED.intValue()
- && tag != EtherTypes.QINQ.intValue()
- && tag != EtherTypes.OLDQINQ.intValue()
- && tag != EtherTypes.CISCOQINQ.intValue()) {
- // pass a value which will tell fail and tell something about the original wrong value
+ if (tag != EtherTypes.VLANTAGGED.intValue() && tag != EtherTypes.QINQ.intValue()
+ && tag != EtherTypes.OLDQINQ.intValue() && tag != EtherTypes.CISCOQINQ.intValue()) {
+ // pass a value which will fail the check and convey information
+ // about the original wrong value
checkValue(ActionType.SET_DL_TYPE, 0xBAD << 16 | tag);
}
}
/**
- * Returns the VID portion of the 802.1q header this action will insert
- * VID - (12 bits)
+ * Returns the VID portion of the 802.1q header this action will insert.
+ * VID - (12 bits)
+ *
* @return byte[]
*/
public int getVlanId() {
}
/**
- * Returns the CFI portion of the 802.1q header this action will insert
- * CFI - (1 bit)
+ * Returns the CFI portion of the 802.1q header this action will insert.
+ * CFI - (1 bit)
+ *
* @return
*/
public int getCfi() {
/**
* Returns the vlan PCP portion of the 802.1q header this action will insert
* PCP - (3 bits)
+ *
* @return byte[]
*/
public int getPcp() {
}
/**
- * Returns the TCI portion of the 802.1q header this action will insert
- * TCI = [PCP + CFI + VID] - (16 bits)
+ * Returns the TCI portion of the 802.1q header this action will insert.
+ * TCI = [PCP + CFI + VID] - (16 bits)
+ *
* @return
*/
public int getTci() {
}
/**
- * Returns the full 802.1q header this action will insert
- * header = [TPID + TIC] (32 bits)
+ * Returns the full 802.1q header this action will insert.
+ * header = [TPID + TCI] (32 bits)
*
* @return int
*/
- @XmlElement(name="VlanHeader")
+ @XmlElement(name = "VlanHeader")
public int getHeader() {
return header;
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
PushVlan other = (PushVlan) obj;
- if (cfi != other.cfi)
+ if (cfi != other.cfi) {
return false;
- if (pcp != other.pcp)
+ }
+ if (pcp != other.pcp) {
return false;
- if (tag != other.tag)
+ }
+ if (tag != other.tag) {
return false;
- if (vlanId != other.vlanId)
+ }
+ if (vlanId != other.vlanId) {
return false;
+ }
return true;
}
@Override
public String toString() {
- return type + "[tag = " + tag + ", pcp = " + pcp + ", cfi = " + cfi
- + ", vlanId = " + vlanId + "]";
+ return type + "[tag = " + tag + ", pcp = " + pcp + ", cfi = " + cfi + ", vlanId = " + vlanId + "]";
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Set destination datalayer address action
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetDlDst extends Action {
+ private static final long serialVersionUID = 1L;
private byte[] address;
/* Dummy constructor for JAXB */
- private SetDlDst () {
+ @SuppressWarnings("unused")
+ private SetDlDst() {
}
public SetDlDst(byte[] dlAddress) {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetDlDst other = (SetDlDst) obj;
- if (!Arrays.equals(address, other.address))
+ if (!Arrays.equals(address, other.address)) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetDlSrc extends Action {
+ private static final long serialVersionUID = 1L;
private byte[] address;
/* Dummy constructor for JAXB */
- private SetDlSrc () {
+ @SuppressWarnings("unused")
+ private SetDlSrc() {
}
public SetDlSrc(byte[] dlAddress) {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetDlSrc other = (SetDlSrc) obj;
- if (!Arrays.equals(address, other.address))
+ if (!Arrays.equals(address, other.address)) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetDlType extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int dlType;
/* Dummy constructor for JAXB */
- private SetDlType () {
+ @SuppressWarnings("unused")
+ private SetDlType() {
}
public SetDlType(int dlType) {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetDlType other = (SetDlType) obj;
- if (dlType != other.dlType)
+ if (dlType != other.dlType) {
return false;
+ }
return true;
}
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class SetNextHop extends Action {
+ private static final long serialVersionUID = 1L;
@XmlElement
private InetAddress address;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetNwDst extends Action {
+ private static final long serialVersionUID = 1L;
InetAddress address;
/* Dummy constructor for JAXB */
- private SetNwDst () {
+ @SuppressWarnings("unused")
+ private SetNwDst() {
}
public SetNwDst(InetAddress address) {
/**
* Returns the network address this action will set
*
- * @return InetAddress
+ * @return InetAddress
*/
public InetAddress getAddress() {
return address;
}
- @XmlElement (name="address")
+ @XmlElement(name = "address")
public String getAddressAsString() {
return address.getHostAddress();
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetNwDst other = (SetNwDst) obj;
if (address == null) {
- if (other.address != null)
+ if (other.address != null) {
return false;
- } else if (!address.equals(other.address))
+ }
+ } else if (!address.equals(other.address)) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetNwSrc extends Action {
+ private static final long serialVersionUID = 1L;
InetAddress address;
/* Dummy constructor for JAXB */
- private SetNwSrc () {
+ @SuppressWarnings("unused")
+ private SetNwSrc() {
}
public SetNwSrc(InetAddress address) {
/**
* Returns the network address this action will set
*
- * @return InetAddress
+ * @return InetAddress
*/
public InetAddress getAddress() {
return address;
}
- @XmlElement (name="address")
+ @XmlElement(name = "address")
public String getAddressAsString() {
return address.getHostAddress();
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetNwSrc other = (SetNwSrc) obj;
if (address == null) {
- if (other.address != null)
+ if (other.address != null) {
return false;
- } else if (!address.equals(other.address))
+ }
+ } else if (!address.equals(other.address)) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetNwTos extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int tos;
/* Dummy constructor for JAXB */
- private SetNwTos () {
+ @SuppressWarnings("unused")
+ private SetNwTos() {
}
public SetNwTos(int tos) {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetNwTos other = (SetNwTos) obj;
- if (tos != other.tos)
+ if (tos != other.tos) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetTpDst extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int port;
/* Dummy constructor for JAXB */
- private SetTpDst () {
+ @SuppressWarnings("unused")
+ private SetTpDst() {
}
public SetTpDst(int port) {
/**
* Returns the transport port the action will set
+ *
* @return
*/
public int getPort() {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetTpDst other = (SetTpDst) obj;
- if (port != other.port)
+ if (port != other.port) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetTpSrc extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int port;
/* Dummy constructor for JAXB */
- private SetTpSrc () {
+ @SuppressWarnings("unused")
+ private SetTpSrc() {
}
public SetTpSrc(int port) {
/**
* Returns the transport port the action will set
+ *
* @return
*/
public int getPort() {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetTpSrc other = (SetTpSrc) obj;
- if (port != other.port)
+ if (port != other.port) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetVlanCfi extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int cfi;
/* Dummy constructor for JAXB */
- private SetVlanCfi () {
+ @SuppressWarnings("unused")
+ private SetVlanCfi() {
}
public SetVlanCfi(int cfi) {
/**
* Returns the 802.1q CFI value that this action will set
+ *
* @return
*/
public int getCfi() {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetVlanCfi other = (SetVlanCfi) obj;
- if (cfi != other.cfi)
+ if (cfi != other.cfi) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetVlanId extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int vlanId;
- private SetVlanId() {
+ @SuppressWarnings("unused")
+ private SetVlanId() {
- }
+ }
public SetVlanId(int vlanId) {
type = ActionType.SET_VLAN_ID;
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetVlanId other = (SetVlanId) obj;
- if (vlanId != other.vlanId)
+ if (vlanId != other.vlanId) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SetVlanPcp extends Action {
- @XmlElement
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private int pcp;
- private SetVlanPcp() {
+ @SuppressWarnings("unused")
+ private SetVlanPcp() {
- }
+ }
public SetVlanPcp(int pcp) {
type = ActionType.SET_VLAN_PCP;
/**
* Returns the value of the vlan PCP this action will set
+ *
* @return int
*/
public int getPcp() {
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (!super.equals(obj))
+ }
+ if (!super.equals(obj)) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
SetVlanPcp other = (SetVlanPcp) obj;
- if (pcp != other.pcp)
+ if (pcp != other.pcp) {
return false;
+ }
return true;
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import javax.xml.bind.annotation.XmlRootElement;
/**
- * Represents the action of sending the packet to the local software path for processing
+ * Represents the action of sending the packet to the local software path for
+ * processing
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class SwPath extends Action {
+ private static final long serialVersionUID = 1L;
public SwPath() {
type = ActionType.SW_PATH;
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Represent a flow: match + actions + flow specific properties
- *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class Flow implements Cloneable, Serializable {
- protected static final Logger logger = LoggerFactory
- .getLogger(Flow.class);
- private static final long serialVersionUID = 1L;
- @XmlElement
+ protected static final Logger logger = LoggerFactory.getLogger(Flow.class);
+ private static final long serialVersionUID = 1L;
+ @XmlElement
private Match match;
@XmlElement
private List<Action> actions;
try {
throw new Exception("Conflicting Match and Action list");
} catch (Exception e) {
- logger.error("",e);
+ logger.error("", e);
}
} else {
this.match = match;
}
/**
- * Set the Match for this flow
- * This operation will overwrite an existing Match if present
+ * Set the Match for this flow. This operation will overwrite an existing
+ * Match if present.
*
* @param match
*/
/**
* Returns a copy of the actions list of this flow
+ *
* @return
*/
public List<Action> getActions() {
}
/**
- * Set the actions list for this flow
- * If a list is already present, it will be
- * replaced with the passed one. During
- * addition, only the valid actions will be added
- * It is a no op if the passed actions is null
- * An empty actions is a vlaid input
+ * Set the actions list for this flow. If a list is already present, it will
+ * be replaced with the passed one. During addition, only the valid actions
+ * will be added. It is a no-op if the passed actions list is null. An empty
+ * actions list is a valid input.
*
* @param actions
*/
}
/**
- * Returns whether the Flow is for IPv4 or IPv6
- * Information is derived from match and actions list
+ * Returns whether the Flow is for IPv4 or IPv6. Information is derived from
+ * the match and actions list.
*
* @return
*/
}
/**
- * Returns true if it finds at least one action which is for IPv6
- * in the list of actions for this Flow
+ * Returns true if it finds at least one action which is for IPv6 in the
+ * list of actions for this Flow
*
* @return
*/
}
break;
case SET_DL_TYPE:
- if (((SetDlType) action).getDlType() == EtherTypes.IPv6
- .intValue()) {
+ if (((SetDlType) action).getDlType() == EtherTypes.IPv6.intValue()) {
return true;
}
break;
cloned.match = this.getMatch();
cloned.actions = this.getActions();
} catch (CloneNotSupportedException e) {
- logger.error("",e);
+ logger.error("", e);
}
return cloned;
}
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Flow other = (Flow) obj;
if (actions == null) {
- if (other.actions != null)
+ if (other.actions != null) {
return false;
- } else if (!actions.equals(other.actions))
+ }
+ } else if (!actions.equals(other.actions)) {
return false;
- if (hardTimeout != other.hardTimeout)
+ }
+ if (hardTimeout != other.hardTimeout) {
return false;
- if (id != other.id)
+ }
+ if (id != other.id) {
return false;
- if (idleTimeout != other.idleTimeout)
+ }
+ if (idleTimeout != other.idleTimeout) {
return false;
+ }
if (match == null) {
- if (other.match != null)
+ if (other.match != null) {
return false;
- } else if (!match.equals(other.match))
+ }
+ } else if (!match.equals(other.match)) {
return false;
- if (priority != other.priority)
+ }
+ if (priority != other.priority) {
return false;
+ }
return true;
}
@Override
public String toString() {
- return "Flow[match = " + match + ", actions = " + actions
- + ", priority = " + priority + ", id = " + id
- + ", idleTimeout = " + idleTimeout + ", hardTimeout = "
- + hardTimeout + "]";
+ return "Flow[match = " + match + ", actions = " + actions + ", priority = " + priority + ", id = " + id
+ + ", idleTimeout = " + idleTimeout + ", hardTimeout = " + hardTimeout + "]";
}
public short getPriority() {
* Adds the specified action to the list of action of this flow
*
* @param action
- * @return false if the passed action is null or not valid or if it fails to add it
+ * @return false if the passed action is null or not valid or if it fails to
+ * add it
*/
public boolean addAction(Action action) {
if (action == null || !action.isValid()) {
}
/**
- * remove ALL actions of type actionType from the list of actions of this flow
+ * remove ALL actions of type actionType from the list of actions of this
+ * flow
*
* @param actionType
- * @return false if an action of that type is present and it fails to remove it
+ * @return false if an action of that type is present and it fails to remove
+ * it
*/
public boolean removeAction(ActionType actionType) {
Iterator<Action> actionIter = this.getActions().iterator();
while (actionIter.hasNext()) {
Action action = actionIter.next();
if (action.getType() == actionType) {
- if (!this.removeAction(action))
+ if (!this.removeAction(action)) {
return false;
+ }
}
}
return true;
* functional modules the asynchronous messages related to flow programming
* coming from the network nodes.
*/
-public interface IFlowProgrammerListener extends
- IPluginOutFlowProgrammerService {
+public interface IFlowProgrammerListener extends IPluginOutFlowProgrammerService {
}
Match cloned = null;
try {
cloned = (Match) super.clone();
+ cloned.matches = matches;
cloned.fields = new HashMap<MatchType, MatchField>();
for (Entry<MatchType, MatchField> entry : this.fields.entrySet()) {
cloned.fields.put(entry.getKey(), entry.getValue().clone());
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
Match other = (Match) obj;
if (fields == null) {
- if (other.fields != null)
+ if (other.fields != null) {
return false;
- } else if (!fields.equals(other.fields))
+ }
+ } else if (!fields.equals(other.fields)) {
return false;
- if (matches != other.matches)
+ }
+ if (matches != other.matches) {
return false;
+ }
return true;
}
@XmlAccessorType(XmlAccessType.NONE)
public class MatchField implements Cloneable, Serializable {
private static final long serialVersionUID = 1L;
- private static final Logger logger = LoggerFactory
- .getLogger(MatchField.class);
+ private static final Logger logger = LoggerFactory.getLogger(MatchField.class);
private MatchType type; // the field we want to match
private Object value; // the value of the field we want to match
private Object mask; // the value of the mask we want to match on the
- // specified field
+ // specified field
private transient boolean isValid;
// To satisfy JAXB
private boolean checkValueType() {
if (type.isCongruentType(value, mask) == false) {
- String valueClass = (value == null) ? "null" : value.getClass()
- .getSimpleName();
- String maskClass = (mask == null) ? "null" : mask.getClass()
- .getSimpleName();
- String error = "Invalid match field's value or mask types.For field: "
- + type.id()
- + " Expected:"
- + type.dataType().getSimpleName()
- + " or equivalent,"
- + " Got:(" + valueClass + "," + maskClass + ")";
+ String valueClass = (value == null) ? "null" : value.getClass().getSimpleName();
+ String maskClass = (mask == null) ? "null" : mask.getClass().getSimpleName();
+ String error = "Invalid match field's value or mask types.For field: " + type.id() + " Expected:"
+ + type.dataType().getSimpleName() + " or equivalent," + " Got:(" + valueClass + "," + maskClass
+ + ")";
throwException(error);
return false;
}
if (type.isValid(value, mask) == false) {
String maskString = (mask == null) ? "null" : ("0x" + Integer
.toHexString(Integer.parseInt(mask.toString())));
- String error = "Invalid match field's value or mask assignement.For field: "
- + type.id()
- + " Expected: "
- + type.getRange()
- + ", Got:(0x"
- + Integer.toHexString(Integer.parseInt(value.toString()))
- + "," + maskString + ")";
+ String error = "Invalid match field's value or mask assignement.For field: " + type.id() + " Expected: "
+ + type.getRange() + ", Got:(0x" + Integer.toHexString(Integer.parseInt(value.toString())) + ","
+ + maskString + ")";
throwException(error);
return false;
}
}
} catch (CloneNotSupportedException e) {
- logger.error("",e);
+ logger.error("", e);
}
return cloned;
}
@Override
public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((mask == null) ? 0 : mask.hashCode());
- result = prime * result + ((type == null) ? 0 : type.hashCode());
- result = prime * result + ((value == null) ? 0 : value.hashCode());
- return result;
+ return type.hashCode(value, mask);
}
@Override
if (type != other.type) {
return false;
}
- return (type.equalValues(this.value, other.value) && type.equalMasks(
- this.mask, other.mask));
+ return (type.equalValues(this.value, other.value) && type.equalMasks(this.mask, other.mask));
}
}
package org.opendaylight.controller.sal.match;
+import java.net.Inet6Address;
import java.net.InetAddress;
import java.util.Arrays;
* Represents the binding between the id, the value and mask type and the range
* values of the elements type that can be matched on the network
* frame/packet/message
- *
- *
- *
*/
public enum MatchType {
IN_PORT("inPort", 1 << 0, NodeConnector.class, 1, 0),
private long minValue;
private long maxValue;
- private MatchType(String id, int index, Class<?> dataType, long minValue,
- long maxValue) {
+ private MatchType(String id, int index, Class<?> dataType, long minValue, long maxValue) {
this.id = id;
this.index = index;
this.dataType = dataType;
}
public String getRange() {
- return "[0x" + Long.toHexString(minValue) + "-0x"
- + Long.toHexString(maxValue) + "]";
+ return "[0x" + Long.toHexString(minValue) + "-0x" + Long.toHexString(maxValue) + "]";
}
/**
val = ((Integer) value).intValue();
msk = (mask != null) ? ((Integer) mask).intValue() : 0;
- } else if (value.getClass() == Short.class
- || value.getClass() == short.class) {
+ } else if (value.getClass() == Short.class || value.getClass() == short.class) {
val = ((Short) value).intValue() & 0xffff;
msk = (mask != null) ? ((Short) mask).intValue() & 0xffff : 0;
- } else if (value.getClass() == Byte.class
- || value.getClass() == byte.class) {
+ } else if (value.getClass() == Byte.class || value.getClass() == byte.class) {
val = ((Byte) value).intValue() & 0xff;
msk = (mask != null) ? ((Byte) mask).intValue() & 0xff : 0;
}
byte mac[] = (byte[]) mask;
long bitmask = 0;
for (short i = 0; i < 6; i++) {
- bitmask |= (((long) mac[i] & 0xffL) << ((5 - i) * 8));
+ bitmask |= ((mac[i] & 0xffL) << ((5 - i) * 8));
}
return bitmask;
}
if (this.dataType == Integer.class || this.dataType == int.class) {
- return (mask == null) ? this.maxValue : ((Integer) mask)
- .longValue();
+ return (mask == null) ? this.maxValue : ((Integer) mask).longValue();
}
if (this.dataType == Short.class || this.dataType == short.class) {
return HexEncode.bytesToHexStringFormat((byte[]) value);
case DL_TYPE:
case DL_VLAN:
- return ((Integer) NetUtils.getUnsignedShort((Short) value))
- .toString();
+ return ((Integer) NetUtils.getUnsignedShort((Short) value)).toString();
case NW_SRC:
case NW_DST:
return ((InetAddress) value).getHostAddress();
case NW_TOS:
- return ((Integer) NetUtils.getUnsignedByte((Byte) value))
- .toString();
+ return ((Integer) NetUtils.getUnsignedByte((Byte) value)).toString();
case TP_SRC:
case TP_DST:
- return ((Integer) NetUtils.getUnsignedShort((Short) value))
- .toString();
+ return ((Integer) NetUtils.getUnsignedShort((Short) value)).toString();
default:
break;
}
return value.toString();
}
+ /**
+ * Returns a hash for the given match value, consistent with
+ * equalValues(). DL_SRC/DL_DST values are 6-byte MAC arrays, so
+ * Arrays.hashCode() is used: NetUtils.byteArray4ToInt() returns 0
+ * for any array whose length is not 4, which would collapse every
+ * MAC address to the same hash.
+ *
+ * @param o the match value (may be null)
+ * @return the value hash, 0 when o is null
+ */
+ public int valueHashCode(Object o) {
+ if (o == null) {
+ return 0;
+ }
+ switch (this) {
+ case DL_SRC:
+ case DL_DST:
+ return Arrays.hashCode((byte[]) o);
+ default:
+ return o.hashCode();
+ }
+ }
+
+ /**
+ * Computes a hash over this match type plus the given value and mask,
+ * consistent with equalValues()/equalMasks().
+ *
+ * DL_SRC/DL_DST values and masks are 6-byte MAC arrays; they are hashed
+ * with Arrays.hashCode() because NetUtils.byteArray4ToInt() returns 0
+ * for arrays whose length is not 4, which would make every MAC address
+ * hash to the same value.
+ *
+ * For NW_SRC/NW_DST a null mask is hashed as the highest (all-ones)
+ * address, presumably because a null mask means a full match --
+ * TODO confirm against equalMasks().
+ *
+ * @param v the match value (may be null)
+ * @param m the match mask (may be null)
+ * @return the combined hash
+ */
+ public int hashCode(Object v, Object m) {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + this.hashCode();
+
+ switch (this) {
+ case DL_SRC:
+ case DL_DST:
+ result = prime * result + ((v == null) ? 0 : Arrays.hashCode((byte[]) v));
+ result = prime * result + ((m == null) ? 0 : Arrays.hashCode((byte[]) m));
+ break;
+ case NW_SRC:
+ case NW_DST:
+ result = prime * result + ((v == null) ? 0 : v.hashCode());
+ result = prime * result + ((m == null) ? NetUtils.gethighestIP(v instanceof Inet6Address).hashCode() : m.hashCode());
+ break;
+ default:
+ result = prime * result + ((v == null) ? 0 : v.hashCode());
+ result = prime * result + ((m == null) ? 0 : m.hashCode());
+ }
+ return result;
+ }
public boolean equalValues(Object a, Object b) {
if (a == b) {
return true;
*/
short computeChecksum(byte[] data, int start) {
int sum = 0, carry = 0, finalSum = 0;
+ int wordData;
int end = start + this.getHeaderSize() / NetUtils.NumBitsInAByte
+ rawPayload.length;
int checksumStartByte = start + getfieldOffset(CHECKSUM)
if (i == checksumStartByte) {
continue;
}
- StringBuffer sbuffer = new StringBuffer();
- sbuffer.append(String.format("%02X", data[i]));
- if (i < (data.length - 1)) {
- sbuffer.append(String.format("%02X", data[i + 1]));
- }
- sum += Integer.valueOf(sbuffer.toString(), 16);
+ wordData = ((data[i] << 8) & 0xFF00) + (data[i + 1] & 0xFF);
+ sum = sum + wordData;
}
carry = (sum >> 16) & 0xFF;
finalSum = (sum & 0xFFFF) + carry;
int end = start + getHeaderLen();
short checkSum = (short) 0;
int sum = 0, carry = 0, finalSum = 0;
- int parsedHex = 0;
+ int wordData;
int checksumStart = start
+ (getfieldOffset(CHECKSUM) / NetUtils.NumBitsInAByte);
if (i == checksumStart) {
continue;
}
- StringBuffer sbuffer = new StringBuffer();
- sbuffer.append(String.format("%02X", data[i]));
- if (i < (data.length - 1)) {
- sbuffer.append(String.format("%02X", data[i + 1]));
- }
-
- parsedHex = Integer.valueOf(sbuffer.toString(), 16);
- sum += parsedHex;
+ wordData = ((data[i] << 8) & 0xFF00) + (data[i + 1] & 0xFF);
+ sum = sum + wordData;
}
carry = (sum >> 16) & 0xFF;
finalSum = (sum & 0xFFFF) + carry;
private long byteCount;
/* Dummy constructor for JAXB */
+ @SuppressWarnings("unused")
private FlowOnNode () {
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.slf4j.LoggerFactory;
/**
- * Utility class containing the common utility functions needed
- * for operating on networking data structures
- *
- *
- *
+ * Utility class containing the common utility functions needed for operating on
+ * networking data structures
*/
public abstract class NetUtils {
- protected static final Logger logger = LoggerFactory
- .getLogger(NetUtils.class);
+ protected static final Logger logger = LoggerFactory.getLogger(NetUtils.class);
/**
* Constant holding the number of bits in a byte
*/
/**
* Converts a 4 bytes array into an integer number
*
- * @param ba the 4 bytes long byte array
- * @return the integer number
+ * @param ba
+ * the 4 bytes long byte array
+ * @return the integer number
*/
public static int byteArray4ToInt(byte[] ba) {
if (ba == null || ba.length != 4) {
return 0;
}
- return (0xff & ba[0]) << 24 | (0xff & ba[1]) << 16
- | (0xff & ba[2]) << 8 | (0xff & ba[3]);
+ return (0xff & ba[0]) << 24 | (0xff & ba[1]) << 16 | (0xff & ba[2]) << 8 | (0xff & ba[3]);
}
/**
/**
* Converts an integer number into a 4 bytes array
*
- * @param i the integer number
- * @return the byte array
+ * @param i
+ * the integer number
+ * @return the byte array
*/
public static byte[] intToByteArray4(int i) {
- return new byte[] { (byte) ((i >> 24) & 0xff),
- (byte) ((i >> 16) & 0xff), (byte) ((i >> 8) & 0xff),
+ return new byte[] { (byte) ((i >> 24) & 0xff), (byte) ((i >> 16) & 0xff), (byte) ((i >> 8) & 0xff),
(byte) (i & 0xff) };
}
/**
- * Converts an IP address passed as integer value into the
- * respective InetAddress object
+ * Converts an IP address passed as integer value into the respective
+ * InetAddress object
*
- * @param address the IP address in integer form
- * @return the IP address in InetAddress form
+ * @param address
+ * the IP address in integer form
+ * @return the IP address in InetAddress form
*/
public static InetAddress getInetAddress(int address) {
InetAddress ip = null;
try {
ip = InetAddress.getByAddress(NetUtils.intToByteArray4(address));
} catch (UnknownHostException e) {
- logger.error("",e);
+ logger.error("", e);
}
return ip;
}
/**
- * Return the InetAddress Network Mask given the length of the prefix bit mask.
- * The prefix bit mask indicates the contiguous leading bits that are NOT masked out.
- * Example: A prefix bit mask length of 8 will give an InetAddress Network Mask of 255.0.0.0
+ * Return the InetAddress Network Mask given the length of the prefix bit
+ * mask. The prefix bit mask indicates the contiguous leading bits that are
+ * NOT masked out. Example: A prefix bit mask length of 8 will give an
+ * InetAddress Network Mask of 255.0.0.0
*
- * @param prefixMaskLength integer representing the length of the prefix network mask
- * @param isV6 boolean representing the IP version of the returned address
+ * @param prefixMaskLength
+ * integer representing the length of the prefix network mask
+ * @param isV6
+ * boolean representing the IP version of the returned address
* @return
*/
- public static InetAddress getInetNetworkMask(int prefixMaskLength,
- boolean isV6) {
- if (prefixMaskLength < 0 || (!isV6 && prefixMaskLength > 32)
- || (isV6 && prefixMaskLength > 128)) {
+ public static InetAddress getInetNetworkMask(int prefixMaskLength, boolean isV6) {
+ if (prefixMaskLength < 0 || (!isV6 && prefixMaskLength > 32) || (isV6 && prefixMaskLength > 128)) {
return null;
}
byte v4Address[] = { 0, 0, 0, 0 };
try {
return InetAddress.getByAddress(address);
} catch (UnknownHostException e) {
- logger.error("",e);
+ logger.error("", e);
}
return null;
}
/**
- * Returns the number of contiguous bits belonging to the subnet, that have to be masked out
- * Example: A prefix network byte mask of ff.ff.ff.00 will give a subnet mask length of 8,
- * while ff.00.00.00 will return a subnet mask length of 24.
- * If the passed prefixMask object is null, 0 is returned
+ * Returns the number of contiguous bits belonging to the subnet, that have
+ * to be masked out Example: A prefix network byte mask of ff.ff.ff.00 will
+ * give a subnet mask length of 8, while ff.00.00.00 will return a subnet
+ * mask length of 24. If the passed prefixMask object is null, 0 is returned
*
- * @param prefixMask the prefix mask as byte array
- * @return the length of the prefix network mask
+ * @param prefixMask
+ * the prefix mask as byte array
+ * @return the length of the prefix network mask
*/
public static int getSubnetMaskLength(byte[] prefixMask) {
int maskLength = 0;
}
/**
- * Returns the number of contiguous bits belonging to the subnet, that have to be masked out
- * Example: A prefix network byte mask of ff.ff.ff.00 will give a subnet mask length of 8,
- * while ff.00.00.00 will return a subnet mask length of 24
- * If the passed prefixMask object is null, 0 is returned
+ * Returns the number of contiguous bits belonging to the subnet, that have
+ * to be masked out Example: A prefix network byte mask of ff.ff.ff.00 will
+ * give a subnet mask length of 8, while ff.00.00.00 will return a subnet
+ * mask length of 24 If the passed prefixMask object is null, 0 is returned
*
- * @param prefixMask the prefix mask as InetAddress
- * @return the length of the prefix network mask
+ * @param prefixMask
+ * the prefix mask as InetAddress
+ * @return the length of the prefix network mask
*/
public static int getSubnetMaskLength(InetAddress prefixMask) {
- return (prefixMask == null) ? 0 : NetUtils
- .getSubnetMaskLength(prefixMask.getAddress());
+ return (prefixMask == null) ? 0 : NetUtils.getSubnetMaskLength(prefixMask.getAddress());
}
/**
- * Given an IP address and a prefix network mask length, it returns
- * the equivalent subnet prefix IP address
- * Example: for ip = "172.28.30.254" and maskLen = 25 it will return "172.28.30.128"
+ * Given an IP address and a prefix network mask length, it returns the
+ * equivalent subnet prefix IP address Example: for ip = "172.28.30.254" and
+ * maskLen = 25 it will return "172.28.30.128"
*
- * @param ip the IP address in InetAddress form
- * @param maskLen the length of the prefix network mask
- * @return the subnet prefix IP address in InetAddress form
+ * @param ip
+ * the IP address in InetAddress form
+ * @param maskLen
+ * the length of the prefix network mask
+ * @return the subnet prefix IP address in InetAddress form
*/
public static InetAddress getSubnetPrefix(InetAddress ip, int maskLen) {
int bytes = maskLen / 8;
}
/**
- * Checks if the test address and mask conflicts with
- * the filter address and mask
+ * Checks if the test address and mask conflicts with the filter address and
+ * mask
*
- * For example:
- * testAddress: 172.28.2.23 testMask: 255.255.255.0
- * filtAddress: 172.28.1.10 testMask: 255.255.255.0
- * conflict
+ * For example: testAddress: 172.28.2.23 testMask: 255.255.255.0
+ * filtAddress: 172.28.1.10 testMask: 255.255.255.0 conflict
*
- * testAddress: 172.28.2.23 testMask: 255.255.255.0
- * filtAddress: 172.28.1.10 testMask: 255.255.0.0
- * do not conflict
+ * testAddress: 172.28.2.23 testMask: 255.255.255.0 filtAddress: 172.28.1.10
+ * testMask: 255.255.0.0 do not conflict
*
* Null parameters are permitted
*
* @param filterMask
* @return
*/
- public static boolean inetAddressConflict(InetAddress testAddress,
- InetAddress filterAddress, InetAddress testMask,
+ public static boolean inetAddressConflict(InetAddress testAddress, InetAddress filterAddress, InetAddress testMask,
InetAddress filterMask) {
// Sanity check
if ((testAddress == null) || (filterAddress == null)) {
/**
* Returns true if the passed MAC address is all zero
*
- * @param mac the byte array representing the MAC address
- * @return true if all MAC bytes are zero
+ * @param mac
+ * the byte array representing the MAC address
+ * @return true if all MAC bytes are zero
*/
public static boolean isZeroMAC(byte[] mac) {
for (short i = 0; i < 6; i++) {
/**
* Returns true if the passed InetAddress contains all zero
*
- * @param ip the IP address to test
- * @return true if the address is all zero
+ * @param ip
+ * the IP address to test
+ * @return true if the address is all zero
*/
public static boolean isAny(InetAddress ip) {
for (byte b : ip.getAddress()) {
try {
address = InetAddress.getByName(addressString);
} catch (UnknownHostException e) {
- logger.error("",e);
+ logger.error("", e);
}
return address;
}
/**
- * Checks if the passed IP v4 address in string form is valid
- * The address may specify a mask at the end as "/MM"
+ * Checks if the passed IP v4 address in string form is valid The address
+ * may specify a mask at the end as "/MM"
*
- * @param cidr the v4 address as A.B.C.D/MM
+ * @param cidr
+ * the v4 address as A.B.C.D/MM
* @return
*/
public static boolean isIPv4AddressValid(String cidr) {
}
/**
- * Checks if the passed IP v6 address in string form is valid
- * The address may specify a mask at the end as "/MMM"
+ * Checks if the passed IP v6 address in string form is valid The address
+ * may specify a mask at the end as "/MMM"
*
- * @param cidr the v6 address as A::1/MMM
+ * @param cidr
+ * the v6 address as A::1/MMM
* @return
*/
public static boolean isIPv6AddressValid(String cidr) {
String values[] = cidr.split("/");
try {
- //when given an IP address, InetAddress.getByName validates the ip address
+ // when given an IP address, InetAddress.getByName validates the ip
+ // address
InetAddress addr = InetAddress.getByName(values[0]);
if (!(addr instanceof Inet6Address)) {
return false;
* @return
*/
public static boolean isIPAddressValid(String cidr) {
- return NetUtils.isIPv4AddressValid(cidr)
- || NetUtils.isIPv6AddressValid(cidr);
+ return NetUtils.isIPv4AddressValid(cidr) || NetUtils.isIPv6AddressValid(cidr);
}
/*
- * Following utilities are useful when you need to
- * compare or bit shift java primitive type variable
- * which are inerently signed
+ * Following utilities are useful when you need to compare or bit shift java
+ * primitive type variable which are inerently signed
*/
/**
* Returns the unsigned value of the passed byte variable
*
- * @param b the byte value
+ * @param b
+ * the byte value
* @return the int variable containing the unsigned byte value
*/
public static int getUnsignedByte(byte b) {
- return (b > 0)? (int)b : (b & 0x7F | 0x80);
+ return (b > 0) ? (int) b : (b & 0x7F | 0x80);
}
/**
* Return the unsigned value of the passed short variable
*
- * @param s the short value
+ * @param s
+ * the short value
* @return the int variable containing the unsigned short value
*/
public static int getUnsignedShort(short s) {
- return (s > 0)? (int)s : (s & 0x7FFF | 0x8000);
+ return (s > 0) ? (int) s : (s & 0x7FFF | 0x8000);
+ }
+
+ /**
+ * Returns the highest v4 or v6 InetAddress
+ *
+ * @param v6
+ * true for IPv6, false for Ipv4
+ * @return The highest IPv4 or IPv6 address
+ */
+ public static InetAddress gethighestIP(boolean v6) {
+ try {
+ return (v6) ? InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff") : InetAddress
+ .getByName("255.255.255.255");
+ } catch (UnknownHostException e) {
+ return null;
+ }
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
@Test
public void testMatchCreation() {
Node node = NodeCreator.createOFNode(7l);
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 6, node);
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 6, node);
MatchField field = new MatchField(MatchType.IN_PORT, port);
Assert.assertTrue(field != null);
field = new MatchField(MatchType.TP_SRC, 120000);
Assert.assertFalse(field.isValid());
- byte mac[] = { (byte) 0xaa, (byte) 0xbb, (byte) 0xcc, (byte) 0xdd,
- (byte) 11, (byte) 22 };
- byte mask[] = { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
- (byte) 0xff, (byte) 0xff };
+ byte mac[] = { (byte) 0xaa, (byte) 0xbb, (byte) 0xcc, (byte) 0xdd, (byte) 11, (byte) 22 };
+ byte mask[] = { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff };
field = null;
field = new MatchField(MatchType.DL_SRC, mac, mask);
Assert.assertFalse(field.getValue() == null);
public void testMatchSetGet() {
Match x = new Match();
short val = 2346;
- NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector(val,
- NodeCreator.createOFNode(1l));
+ NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector(val, NodeCreator.createOFNode(1l));
x.setField(MatchType.IN_PORT, inPort);
- Assert.assertTrue(((NodeConnector) x.getField(MatchType.IN_PORT)
- .getValue()).equals(inPort));
- Assert
- .assertTrue((Short) ((NodeConnector) x.getField(
- MatchType.IN_PORT).getValue()).getID() == val);
+ Assert.assertTrue(((NodeConnector) x.getField(MatchType.IN_PORT).getValue()).equals(inPort));
+ Assert.assertTrue((Short) ((NodeConnector) x.getField(MatchType.IN_PORT).getValue()).getID() == val);
}
@Test
public void testMatchSetGetMAC() {
Match x = new Match();
- byte mac[] = { (byte) 0xaa, (byte) 0xbb, (byte) 0xcc, (byte) 0xdd,
- (byte) 11, (byte) 22 };
+ byte mac[] = { (byte) 0xaa, (byte) 0xbb, (byte) 0xcc, (byte) 0xdd, (byte) 11, (byte) 22 };
byte mac2[] = { (byte) 0xaa, (byte) 0xbb, 0, 0, 0, (byte) 0xbb };
- byte mask1[] = { (byte) 0x11, (byte) 0x22, (byte) 0x33, (byte) 0x44,
- (byte) 0x55, (byte) 0x66 };
- byte mask2[] = { (byte) 0xff, (byte) 0xff, (byte) 0, (byte) 0,
- (byte) 0, (byte) 0xff };
+ byte mask1[] = { (byte) 0x11, (byte) 0x22, (byte) 0x33, (byte) 0x44, (byte) 0x55, (byte) 0x66 };
+ byte mask2[] = { (byte) 0xff, (byte) 0xff, (byte) 0, (byte) 0, (byte) 0, (byte) 0xff };
x.setField(MatchType.DL_SRC, mac.clone(), mask1);
x.setField(MatchType.DL_DST, mac2.clone(), mask2);
- Assert.assertTrue(Arrays.equals(mac, (byte[]) x.getField(
- MatchType.DL_SRC).getValue()));
- Assert.assertFalse(Arrays.equals((byte[]) x.getField(MatchType.DL_SRC)
- .getValue(), (byte[]) x.getField(MatchType.DL_DST).getValue()));
- Assert.assertFalse(x.getField(MatchType.DL_SRC).getBitMask() == x
- .getField(MatchType.DL_DST).getBitMask());
+ Assert.assertTrue(Arrays.equals(mac, (byte[]) x.getField(MatchType.DL_SRC).getValue()));
+ Assert.assertFalse(Arrays.equals((byte[]) x.getField(MatchType.DL_SRC).getValue(),
+ (byte[]) x.getField(MatchType.DL_DST).getValue()));
+ Assert.assertFalse(x.getField(MatchType.DL_SRC).getBitMask() == x.getField(MatchType.DL_DST).getBitMask());
x.setField(new MatchField(MatchType.DL_DST, mac.clone(), mask1));
- Assert.assertTrue(Arrays.equals((byte[]) x.getField(MatchType.DL_SRC)
- .getValue(), (byte[]) x.getField(MatchType.DL_DST).getValue()));
+ Assert.assertTrue(Arrays.equals((byte[]) x.getField(MatchType.DL_SRC).getValue(),
+ (byte[]) x.getField(MatchType.DL_DST).getValue()));
}
@Test
InetAddress mask = InetAddress.getByName("255.255.0.0");
x.setField(MatchType.NW_SRC, address, mask);
- Assert.assertTrue(ip.equals(((InetAddress) x.getField(MatchType.NW_SRC)
- .getValue()).getHostAddress()));
+ Assert.assertTrue(ip.equals(((InetAddress) x.getField(MatchType.NW_SRC).getValue()).getHostAddress()));
Assert.assertTrue(x.getField(MatchType.NW_SRC).getMask().equals(mask));
}
public void testMatchSetGetEtherType() throws UnknownHostException {
Match x = new Match();
- x.setField(MatchType.DL_TYPE, EtherTypes.QINQ.shortValue(),
- (short) 0xffff);
- Assert.assertTrue(((Short) x.getField(MatchType.DL_TYPE).getValue())
- .equals(EtherTypes.QINQ.shortValue()));
- Assert
- .assertFalse(x.getField(MatchType.DL_TYPE).getValue() == EtherTypes.QINQ);
- Assert.assertFalse(x.getField(MatchType.DL_TYPE).getValue().equals(
- EtherTypes.QINQ));
-
- x.setField(MatchType.DL_TYPE, EtherTypes.LLDP.shortValue(),
- (short) 0xffff);
- Assert.assertTrue(((Short) x.getField(MatchType.DL_TYPE).getValue())
- .equals(EtherTypes.LLDP.shortValue()));
- Assert.assertFalse(x.getField(MatchType.DL_TYPE).equals(
- EtherTypes.LLDP.intValue()));
+ x.setField(MatchType.DL_TYPE, EtherTypes.QINQ.shortValue(), (short) 0xffff);
+ Assert.assertTrue(((Short) x.getField(MatchType.DL_TYPE).getValue()).equals(EtherTypes.QINQ.shortValue()));
+ Assert.assertFalse(x.getField(MatchType.DL_TYPE).getValue() == EtherTypes.QINQ);
+ Assert.assertFalse(x.getField(MatchType.DL_TYPE).getValue().equals(EtherTypes.QINQ));
+
+ x.setField(MatchType.DL_TYPE, EtherTypes.LLDP.shortValue(), (short) 0xffff);
+ Assert.assertTrue(((Short) x.getField(MatchType.DL_TYPE).getValue()).equals(EtherTypes.LLDP.shortValue()));
+ Assert.assertFalse(x.getField(MatchType.DL_TYPE).equals(EtherTypes.LLDP.intValue()));
}
@Test
@Test
public void testMatchMask() {
Match x = new Match();
- NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector(
- (short) 6, NodeCreator.createOFNode(3l));
+ NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector((short) 6, NodeCreator.createOFNode(3l));
x.setField(MatchType.IN_PORT, inPort);
x.setField(MatchType.DL_VLAN, (short) 28, (short) 0xfff);
Assert.assertFalse(x.getMatches() == 0);
- Assert
- .assertTrue(x.getMatches() == (MatchType.IN_PORT.getIndex() | MatchType.DL_VLAN
- .getIndex()));
+ Assert.assertTrue(x.getMatches() == (MatchType.IN_PORT.getIndex() | MatchType.DL_VLAN.getIndex()));
}
@Test
public void testMatchBitMask() {
- byte mac[] = { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
- (byte) 22, (byte) 12 };
- byte mask[] = { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
- (byte) 0xff, (byte) 0 };
- NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector(
- (short) 4095, NodeCreator.createOFNode(7l));
+ byte mac[] = { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 22, (byte) 12 };
+ byte mask[] = { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0 };
+ NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector((short) 4095, NodeCreator.createOFNode(7l));
MatchField x = new MatchField(MatchType.IN_PORT, inPort);
Assert.assertTrue((x.getMask()) == null);
@Test
public void testNullMask() {
- byte mac[] = { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
- (byte) 22, (byte) 12 };
- NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector(
- (short) 2000, NodeCreator.createOFNode(7l));
+ byte mac[] = { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 22, (byte) 12 };
+ NodeConnector inPort = NodeConnectorCreator.createOFNodeConnector((short) 2000, NodeCreator.createOFNode(7l));
MatchField x = new MatchField(MatchType.IN_PORT, inPort);
Assert.assertTrue(x.getBitMask() == 0);
@Test
public void testEquality() throws Exception {
Node node = NodeCreator.createOFNode(7l);
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- NodeConnector port2 = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
- byte srcMac2[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac2[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
- InetAddress srcIP = InetAddress
- .getByName("2001:420:281:1004:407a:57f4:4d15:c355");
- InetAddress dstIP = InetAddress
- .getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMask = InetAddress
- .getByName("ffff:ffff:ffff:ffff:0:0:0:0");
- InetAddress ipMaskd = InetAddress
- .getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
- InetAddress srcIP2 = InetAddress
- .getByName("2001:420:281:1004:407a:57f4:4d15:c355");
- InetAddress dstIP2 = InetAddress
- .getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMask2 = InetAddress
- .getByName("ffff:ffff:ffff:ffff:0:0:0:0");
- InetAddress ipMaskd2 = InetAddress
- .getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ NodeConnector port2 = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ byte srcMac2[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac2[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ InetAddress srcIP = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ InetAddress dstIP = InetAddress.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
+ InetAddress ipMask = InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ InetAddress ipMaskd = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ InetAddress srcIP2 = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ InetAddress dstIP2 = InetAddress.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
+ InetAddress ipMask2 = InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ InetAddress ipMaskd2 = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
short ethertype = EtherTypes.IPv6.shortValue();
short ethertype2 = EtherTypes.IPv6.shortValue();
short vlan = (short) 27, vlan2 = (short) 27;
byte vlanPr = (byte) 3, vlanPr2 = (byte) 3;
Byte tos = 4, tos2 = 4;
- byte proto = IPProtocols.UDP.byteValue(), proto2 = IPProtocols.UDP
- .byteValue();
+ byte proto = IPProtocols.UDP.byteValue(), proto2 = IPProtocols.UDP.byteValue();
short src = (short) 5500, src2 = (short) 5500;
short dst = 80, dst2 = 80;
// Make sure all values are equals
for (MatchType type : MatchType.values()) {
if (match1.isPresent(type)) {
- Assert.assertTrue(match1.getField(type).equals(
- match2.getField(type)));
+ Assert.assertTrue(match1.getField(type).equals(match2.getField(type)));
}
}
Assert.assertTrue(match1.equals(match2));
}
+ @Test
+ public void testHashCode() throws Exception {
+ byte srcMac1[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte srcMac2[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac1[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ byte dstMac2[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ short ethertype = EtherTypes.IPv4.shortValue();
+ short ethertype2 = EtherTypes.IPv4.shortValue();
+ InetAddress srcIP1 = InetAddress.getByName("1.1.1.1");
+ InetAddress ipMask1 = InetAddress.getByName("255.255.255.255");
+ InetAddress srcIP2 = InetAddress.getByName("1.1.1.1");
+ InetAddress ipMask2 = InetAddress.getByName("255.255.255.255");
+
+ Match match1 = new Match();
+ Match match2 = new Match();
+
+ MatchField field1 = new MatchField(MatchType.DL_SRC, srcMac1);
+ MatchField field2 = new MatchField(MatchType.DL_SRC, srcMac2);
+ Assert.assertTrue(field1.hashCode() == field2.hashCode());
+
+ match1.setField(field1);
+ match2.setField(field2);
+ Assert.assertTrue(match1.hashCode() == match2.hashCode());
+
+ MatchField field3 = new MatchField(MatchType.DL_DST, dstMac1);
+ MatchField field4 = new MatchField(MatchType.DL_DST, dstMac2);
+ Assert.assertTrue(field3.hashCode() == field4.hashCode());
+
+ match1.setField(field3);
+ match2.setField(field4);
+ Assert.assertTrue(match1.hashCode() == match2.hashCode());
+
+ MatchField field5 = new MatchField(MatchType.DL_TYPE, ethertype);
+ MatchField field6 = new MatchField(MatchType.DL_TYPE, ethertype2);
+ Assert.assertTrue(field5.hashCode() == field6.hashCode());
+
+ match1.setField(field5);
+ match2.setField(field6);
+ Assert.assertTrue(match1.hashCode() == match2 .hashCode());
+
+ MatchField field7 = new MatchField(MatchType.NW_SRC, srcIP1, ipMask1);
+ MatchField field8 = new MatchField(MatchType.NW_SRC, srcIP2, ipMask2);
+ Assert.assertTrue(field7.hashCode() == field8.hashCode());
+
+ match1.setField(field7);
+ match2.setField(field8);
+ Assert.assertTrue(match1.hashCode() == match2.hashCode());
+
+ }
+
@Test
public void testCloning() throws Exception {
Node node = NodeCreator.createOFNode(7l);
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
- InetAddress srcIP = InetAddress
- .getByName("2001:420:281:1004:407a:57f4:4d15:c355");
- InetAddress dstIP = InetAddress
- .getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMasks = InetAddress
- .getByName("ffff:ffff:ffff:ffff:0:0:0:0");
- InetAddress ipMaskd = InetAddress
- .getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ InetAddress srcIP = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ InetAddress dstIP = InetAddress.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
+ InetAddress ipMasks = InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ InetAddress ipMaskd = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
short ethertype = EtherTypes.IPv6.shortValue();
short vlan = (short) 27;
byte vlanPr = (byte) 3;
for (MatchType type : MatchType.values()) {
if (match.isPresent(type)) {
if (!match.getField(type).equals(cloned.getField(type))) {
- Assert.assertTrue(match.getField(type).equals(
- cloned.getField(type)));
+ Assert.assertTrue(match.getField(type).equals(cloned.getField(type)));
}
}
}
- // Make sure none of the fields couples are pointing to the same reference
+ // Make sure none of the fields couples are pointing to the same
+ // reference
MatchField a = null, b = null;
for (MatchType type : MatchType.values()) {
a = match.getField(type);
Assert.assertTrue(match.equals(cloned));
- Assert.assertFalse(match.getField(MatchType.DL_SRC) == cloned
- .getField(MatchType.DL_SRC));
- Assert.assertFalse(match.getField(MatchType.NW_DST) == cloned
- .getField(MatchType.NW_DST));
- Assert.assertTrue(match.getField(MatchType.NW_DST).getMask().equals(
- cloned.getField(MatchType.NW_DST).getMask()));
+ Assert.assertFalse(match.getField(MatchType.DL_SRC) == cloned.getField(MatchType.DL_SRC));
+ Assert.assertFalse(match.getField(MatchType.NW_DST) == cloned.getField(MatchType.NW_DST));
+ Assert.assertTrue(match.getField(MatchType.NW_DST).getMask()
+ .equals(cloned.getField(MatchType.NW_DST).getMask()));
+ Assert.assertTrue(match.hashCode() == cloned.hashCode());
}
@Test
public void testFlip() throws Exception {
Node node = NodeCreator.createOFNode(7l);
- NodeConnector port = NodeConnectorCreator.createOFNodeConnector(
- (short) 24, node);
- byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78,
- (byte) 0x9a, (byte) 0xbc };
- byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d,
- (byte) 0x5e, (byte) 0x6f };
- InetAddress srcIP = InetAddress
- .getByName("2001:420:281:1004:407a:57f4:4d15:c355");
- InetAddress dstIP = InetAddress
- .getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMasks = InetAddress
- .getByName("ffff:ffff:ffff:ffff:0:0:0:0");
- InetAddress ipMaskd = InetAddress
- .getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
+ byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
+ byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
+ InetAddress srcIP = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ InetAddress dstIP = InetAddress.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
+ InetAddress ipMasks = InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ InetAddress ipMaskd = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
short ethertype = EtherTypes.IPv6.shortValue();
short vlan = (short) 27;
byte vlanPr = (byte) 3;
Match flipped = match.reverse();
- Assert.assertTrue(match.getField(MatchType.DL_TYPE).equals(
- flipped.getField(MatchType.DL_TYPE)));
- Assert.assertTrue(match.getField(MatchType.DL_VLAN).equals(
- flipped.getField(MatchType.DL_VLAN)));
-
- Assert.assertTrue(match.getField(MatchType.DL_DST).getValue().equals(
- flipped.getField(MatchType.DL_SRC).getValue()));
- Assert.assertTrue(match.getField(MatchType.DL_DST).getMask() == flipped
- .getField(MatchType.DL_SRC).getMask());
-
- Assert.assertTrue(match.getField(MatchType.NW_DST).getValue().equals(
- flipped.getField(MatchType.NW_SRC).getValue()));
- Assert.assertTrue(match.getField(MatchType.NW_DST).getMask() == flipped
- .getField(MatchType.NW_SRC).getMask());
-
- Assert.assertTrue(match.getField(MatchType.TP_DST).getValue().equals(
- flipped.getField(MatchType.TP_SRC).getValue()));
- Assert.assertTrue(match.getField(MatchType.TP_DST).getMask() == flipped
- .getField(MatchType.TP_SRC).getMask());
+ Assert.assertTrue(match.getField(MatchType.DL_TYPE).equals(flipped.getField(MatchType.DL_TYPE)));
+ Assert.assertTrue(match.getField(MatchType.DL_VLAN).equals(flipped.getField(MatchType.DL_VLAN)));
+
+ Assert.assertTrue(match.getField(MatchType.DL_DST).getValue()
+ .equals(flipped.getField(MatchType.DL_SRC).getValue()));
+ Assert.assertTrue(match.getField(MatchType.DL_DST).getMask() == flipped.getField(MatchType.DL_SRC).getMask());
+
+ Assert.assertTrue(match.getField(MatchType.NW_DST).getValue()
+ .equals(flipped.getField(MatchType.NW_SRC).getValue()));
+ Assert.assertTrue(match.getField(MatchType.NW_DST).getMask() == flipped.getField(MatchType.NW_SRC).getMask());
+
+ Assert.assertTrue(match.getField(MatchType.TP_DST).getValue()
+ .equals(flipped.getField(MatchType.TP_SRC).getValue()));
+ Assert.assertTrue(match.getField(MatchType.TP_DST).getMask() == flipped.getField(MatchType.TP_SRC).getMask());
Match flipflip = flipped.reverse().reverse();
Assert.assertTrue(flipflip.equals(flipped));
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-generator-api</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-generator-impl</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-generator-spi</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-generator-util</artifactId>\r
<dependencies>\r
package org.opendaylight.controller.binding.generator.util;
+import java.util.*;
+
import org.opendaylight.controller.binding.generator.util.generated.type.builder.GeneratedTOBuilderImpl;
import org.opendaylight.controller.sal.binding.model.api.type.builder.GeneratedTOBuilder;
import org.opendaylight.controller.yang.common.QName;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import java.util.*;
-
public final class BindingGeneratorUtil {
- private static final String[] SET_VALUES = new String[]{"abstract",
- "assert", "boolean", "break", "byte", "case", "catch", "char",
- "class", "const", "continue", "default", "double", "do", "else",
- "enum", "extends", "false", "final", "finally", "float", "for",
- "goto", "if", "implements", "import", "instanceof", "int",
- "interface", "long", "native", "new", "null", "package", "private",
- "protected", "public", "return", "short", "static", "strictfp",
- "super", "switch", "synchronized", "this", "throw", "throws",
- "transient", "true", "try", "void", "volatile", "while"};
+ private static final String[] SET_VALUES = new String[] { "abstract", "assert", "boolean", "break", "byte", "case",
+ "catch", "char", "class", "const", "continue", "default", "double", "do", "else", "enum", "extends",
+ "false", "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int",
+ "interface", "long", "native", "new", "null", "package", "private", "protected", "public", "return",
+ "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", "throws", "transient",
+ "true", "try", "void", "volatile", "while" };
private BindingGeneratorUtil() {
}
- public static final Set<String> JAVA_RESERVED_WORDS = new HashSet<String>(
- Arrays.asList(SET_VALUES));
+ public static final Set<String> JAVA_RESERVED_WORDS = new HashSet<String>(Arrays.asList(SET_VALUES));
public static String validateJavaPackage(final String packageName) {
if (packageName != null) {
return parameterName;
}
- public static GeneratedTOBuilder schemaNodeToTransferObjectBuilder(
- final String packageName, final SchemaNode schemaNode,
- final String transObjectName) {
- if (packageName != null && schemaNode != null
- && transObjectName != null) {
+ public static GeneratedTOBuilder schemaNodeToTransferObjectBuilder(final String packageName,
+ final SchemaNode schemaNode, final String transObjectName) {
+ if (packageName != null && schemaNode != null && transObjectName != null) {
- final String genTOName = BindingGeneratorUtil
- .parseToClassName(transObjectName);
- final GeneratedTOBuilder newType = new GeneratedTOBuilderImpl(
- packageName, genTOName);
+ final String genTOName = BindingGeneratorUtil.parseToClassName(transObjectName);
+ final GeneratedTOBuilder newType = new GeneratedTOBuilderImpl(packageName, genTOName);
return newType;
return null;
}
- public static String moduleNamespaceToPackageName(
- final Module module) {
+ public static String moduleNamespaceToPackageName(final Module module) {
final StringBuilder packageNameBuilder = new StringBuilder();
final Calendar calendar = Calendar.getInstance();
if (module.getRevision() == null) {
- throw new IllegalArgumentException("Module " + module.getName()
- + " does not specify revision date!");
+ throw new IllegalArgumentException("Module " + module.getName() + " does not specify revision date!");
}
packageNameBuilder.append("org.opendaylight.yang.gen.v");
packageNameBuilder.append(module.getYangVersion());
return validateJavaPackage(packageNameBuilder.toString());
}
- public static String packageNameForGeneratedType(
- final String basePackageName, final SchemaPath schemaPath) {
+ public static String packageNameForGeneratedType(final String basePackageName, final SchemaPath schemaPath) {
if (basePackageName == null) {
- throw new IllegalArgumentException("Base Package Name cannot be " +
- "NULL!");
+ throw new IllegalArgumentException("Base Package Name cannot be NULL!");
}
if (schemaPath == null) {
throw new IllegalArgumentException("Schema Path cannot be NULL!");
return validateJavaPackage(builder.toString());
}
- public static String packageNameForTypeDefinition(
- final String basePackageName, final TypeDefinition<?> typeDefinition) {
+ public static String packageNameForTypeDefinition(final String basePackageName,
+ final TypeDefinition<?> typeDefinition) {
if (basePackageName == null) {
- throw new IllegalArgumentException("Base Package Name cannot be " +
- "NULL!");
+ throw new IllegalArgumentException("Base Package Name cannot be NULL!");
}
if (typeDefinition == null) {
- throw new IllegalArgumentException("Type Definition reference " +
- "cannot be NULL!");
+ throw new IllegalArgumentException("Type Definition reference cannot be NULL!");
}
final StringBuilder builder = new StringBuilder();
// check if 'toBeRemoved' character is not the only character in
// 'text'
if (sb.length() == 0) {
- throw new IllegalArgumentException("Name can not be '"
- + toBeRemoved + "'");
+ throw new IllegalArgumentException("Name can not be '" + toBeRemoved + "'");
}
- String replacement = String.valueOf(sb.charAt(toBeRemovedPos))
- .toUpperCase();
+ String replacement = String.valueOf(sb.charAt(toBeRemovedPos)).toUpperCase();
sb.setCharAt(toBeRemovedPos, replacement.charAt(0));
toBeRemovedPos = sb.indexOf(toBeRemoved);
}
this.comment = comment;
this.annotations = toUnmodifiableAnnotations(annotationBuilders);
this.implementsTypes = Collections.unmodifiableList(implementsTypes);
- this.constants = toUnmodifiableConstants(constants);
+ this.constants = Collections.unmodifiableList(constants);
this.enumerations = toUnmodifiableEnumerations(enumBuilders);
this.methodSignatures = toUnmodifiableMethods(methodBuilders);
this.enclosedTypes = toUnmodifiableEnclosedTypes(enclosedGenTypeBuilders, enclosedGenTOBuilders);
return Collections.unmodifiableList(enums);
}
- protected List<Constant> toUnmodifiableConstants(
- List<Constant> constants) {
- for (final Constant constant : constants) {
- constants.add(new ConstantImpl(this, constant.getType(),
- constant.getName(), constant.getValue()));
- }
- return Collections.unmodifiableList(constants);
- }
-
@Override
public Type getParentType() {
return parent;
*/
package org.opendaylight.controller.binding.generator.util.generated.type.builder;
+import org.opendaylight.controller.binding.generator.util.AbstractBaseType;
import org.opendaylight.controller.sal.binding.model.api.AccessModifier;
import org.opendaylight.controller.sal.binding.model.api.Constant;
import org.opendaylight.controller.sal.binding.model.api.Type;
import java.util.ArrayList;
import java.util.List;
-abstract class AbstractGeneratedTypeBuilder implements GeneratedTypeBuilder {
+abstract class AbstractGeneratedTypeBuilder extends AbstractBaseType implements GeneratedTypeBuilder {
private final String packageName;
private String comment = "";
private boolean isAbstract;
public AbstractGeneratedTypeBuilder(final String packageName, final String name) {
+ super(packageName, name);
if (packageName == null) {
throw new IllegalArgumentException("Package Name for Generated Type cannot be null!");
}
this.name = name;
}
- @Override
- public String getPackageName() {
- return packageName;
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public String getFullyQualifiedName() {
- return packageName + "." + name;
- }
-
-
protected String getComment() {
return comment;
}
import java.util.Collections;
import java.util.List;
+import org.opendaylight.controller.binding.generator.util.AbstractBaseType;
import org.opendaylight.controller.sal.binding.model.api.AnnotationType;
import org.opendaylight.controller.sal.binding.model.api.type.builder.AnnotationTypeBuilder;
-final class AnnotationTypeBuilderImpl implements AnnotationTypeBuilder {
+final class AnnotationTypeBuilderImpl extends AbstractBaseType implements AnnotationTypeBuilder {
private final String packageName;
private final String name;
private final List<AnnotationType.Parameter> parameters;
public AnnotationTypeBuilderImpl(final String packageName, final String name) {
- super();
+ super(packageName, name);
this.packageName = packageName;
this.name = name;
annotationBuilders = new ArrayList<>();
parameters = new ArrayList<>();
}
- @Override
- public String getPackageName() {
- return packageName;
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public String getFullyQualifiedName() {
- return packageName + "." + name;
- }
-
@Override
public AnnotationTypeBuilder addAnnotation(final String packageName, final String name) {
if (packageName != null && name != null) {
import java.util.Collections;
import java.util.List;
+import org.opendaylight.controller.binding.generator.util.AbstractBaseType;
import org.opendaylight.controller.sal.binding.model.api.AnnotationType;
import org.opendaylight.controller.sal.binding.model.api.Enumeration;
import org.opendaylight.controller.sal.binding.model.api.Type;
import org.opendaylight.controller.sal.binding.model.api.type.builder.AnnotationTypeBuilder;
import org.opendaylight.controller.sal.binding.model.api.type.builder.EnumBuilder;
-public final class EnumerationBuilderImpl implements EnumBuilder {
+public final class EnumerationBuilderImpl extends AbstractBaseType implements EnumBuilder {
private final String packageName;
private final String name;
private final List<Enumeration.Pair> values;
- private final List<AnnotationTypeBuilder> annotationBuilders = new ArrayList<AnnotationTypeBuilder>();
+ private final List<AnnotationTypeBuilder> annotationBuilders = new ArrayList<>();
public EnumerationBuilderImpl(final String packageName, final String name) {
- super();
+ super(packageName, name);
this.packageName = packageName;
this.name = name;
values = new ArrayList<>();
}
-
- @Override
- public String getPackageName() {
- return packageName;
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public String getFullyQualifiedName() {
- return packageName + "." + name;
- }
@Override
public AnnotationTypeBuilder addAnnotation(final String packageName, final String name) {
public GeneratedPropertyBuilderImpl(String name) {
super(name);
+ this.isReadOnly = true;
}
@Override
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-java-api-generator</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>binding-generator</artifactId>\r
- <version>0.5.1-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>binding-model-api</artifactId>\r
</project>
\ No newline at end of file
<parent>
<artifactId>binding-generator</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</parent>
<artifactId>maven-sal-api-gen-plugin</artifactId>
File outputBaseDir, Set<Module> yangModules) throws IOException {
final BindingGenerator bindingGenerator = new BindingGeneratorImpl();
- final List<Type> types = bindingGenerator.generateTypes(context);
+ final List<Type> types = bindingGenerator.generateTypes(context,yangModules);
final Set<GeneratedType> typesToGenerate = new HashSet<>();
final Set<GeneratedTransferObject> tosToGenerate = new HashSet<>();
final Set<Enumeration> enumsToGenerate = new HashSet<>();
<parent>
<artifactId>yang</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<relativePath>../../yang/pom.xml</relativePath>
</parent>
<artifactId>yang-maven-plugin-it</artifactId>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</dependency>
</dependencies>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</dependency>
</dependencies>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<groupId>org.opendaylight.controller</groupId>
<artifactId>generator-test1</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>generator-test2</artifactId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>generator-test1</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<scope>system</scope>
- <systemPath>${project.basedir}/../GenerateTest1/target/generator-test1-0.5.1-SNAPSHOT.jar</systemPath>
+ <systemPath>${project.basedir}/../GenerateTest1/target/generator-test1-0.5.3-SNAPSHOT.jar</systemPath>
</dependency>
</dependencies>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<parent>
<artifactId>binding-generator</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</parent>
<artifactId>test</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>generator-test2</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<build>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin-spi</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>test-jar</type>
</dependency>
</dependencies>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>test</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<parent>
<artifactId>yang</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<relativePath>../../yang/pom.xml</relativePath>
</parent>
<parent>
<artifactId>yang</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<relativePath>../../yang/pom.xml</relativePath>
</parent>
<artifactId>yang-maven-plugin-spi</artifactId>
<artifactId>yang-prototype</artifactId>
<version>0.5-SNAPSHOT</version>
</parent>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>binding-generator</artifactId>
<packaging>pom</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- version of YANG tools dependencies -->
- <yang.version>0.5.2-SNAPSHOT</yang.version>
+ <yang.version>0.5.3-SNAPSHOT</yang.version>
</properties>
<modules>
<parent>
<artifactId>binding-generator</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</parent>
<artifactId>maven-code-gen-sample</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>maven-sal-api-gen-plugin</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>jar</type>
</dependency>
</dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-binding</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
<parent>
<artifactId>binding-generator</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
</parent>
<artifactId>modeling-sample</artifactId>
<plugin>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang-maven-plugin</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<executions>
<execution>
<goals>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>maven-sal-api-gen-plugin</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<type>jar</type>
</dependency>
</dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>yang</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<relativePath>../../yang/pom.xml</relativePath>
</parent>
<artifactId>yang-model-parser-api</artifactId>
/**
* Parse one or more Yang model files and return the definitions of Yang
- * modules defined in *.Yang files; <br>
+ * modules defined in *.yang files; <br>
* This method SHOULD be used if user need to parse multiple yang models
* that are referenced either through import or include statements.
*
*/
Set<Module> parseYangModels(final List<File> yangFiles);
+ /**
+ * Parse one or more Yang model files and return the definitions of Yang
+ * modules defined in *.yang files. <br>
+ * This method SHOULD be used if user has already parsed context and need to
+ * parse additinal yang models which can have dependencies on models in this
+ * context.
+ *
+ * @param yangFiles
+ * yang files to parse
+ * @param context
+ * SchemaContext containing already parsed yang models
+ * @return Set of Yang Modules
+ */
+ Set<Module> parseYangModels(final List<File> yangFiles, final SchemaContext context);
+
/**
* Equivalent to {@link #parseYangModels(List)} that returns parsed modules
* mapped to Files from which they were parsed.
*/
Map<File, Module> parseYangModelsMapped(final List<File> yangFiles);
- Set<Module> parseYangModelsFromStreams(
- final List<InputStream> yangModelStreams);
+ /**
+ * Parse one or more Yang model streams and return the definitions of Yang
+ * modules defined in *.yang files; <br>
+ * This method SHOULD be used if user need to parse multiple yang models
+ * that are referenced either through import or include statements.
+ *
+ * @param yangModelStreams
+ * yang streams to parse
+ * @return Set of Yang Modules
+ */
+ Set<Module> parseYangModelsFromStreams(final List<InputStream> yangModelStreams);
+
+ /**
+ * Parse one or more Yang model streams and return the definitions of Yang
+ * modules defined in *.yang files. <br>
+ * This method SHOULD be used if user has already parsed context and need to
+ * parse additinal yang models which can have dependencies on models in this
+ * context.
+ *
+ * @param yangModelStreams
+ * yang streams to parse
+ * @param context
+ * SchemaContext containing already parsed yang models
+ * @return Set of Yang Modules
+ */
+ Set<Module> parseYangModelsFromStreams(final List<InputStream> yangModelStreams, final SchemaContext context);
- Map<InputStream, Module> parseYangModelsFromStreamsMapped(
- final List<InputStream> yangModelStreams);
+ /**
+ * Equivalent to {@link #parseYangModels(List)} that returns parsed modules
+ * mapped to IputStreams from which they were parsed.
+ *
+ * @param yangModelStreams
+ * yang streams to parse
+ * @return Map of Yang Modules
+ */
+ Map<InputStream, Module> parseYangModelsFromStreamsMapped(final List<InputStream> yangModelStreams);
/**
* Creates {@link SchemaContext} from specified Modules. The modules SHOULD
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
<relativePath>../../yang/pom.xml</relativePath>\r
</parent>\r
<artifactId>yang-model-parser-impl</artifactId>\r
import java.util.Set;
import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.DataSchemaNode;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
public abstract class AbstractDataNodeContainerBuilder implements DataNodeContainerBuilder {
private final QName qname;
- protected final Set<DataSchemaNodeBuilder> childNodes = new HashSet<DataSchemaNodeBuilder>();
- protected final Set<GroupingBuilder> groupings = new HashSet<GroupingBuilder>();
+
+ protected Set<DataSchemaNode> childNodes;
+ protected final Set<DataSchemaNodeBuilder> addedChildNodes = new HashSet<DataSchemaNodeBuilder>();
+
+ protected Set<GroupingDefinition> groupings;
+ protected final Set<GroupingBuilder> addedGroupings = new HashSet<GroupingBuilder>();
protected AbstractDataNodeContainerBuilder(QName qname) {
this.qname = qname;
@Override
public Set<DataSchemaNodeBuilder> getChildNodes() {
- return childNodes;
+ return addedChildNodes;
}
@Override
public void addChildNode(DataSchemaNodeBuilder childNode) {
- childNodes.add(childNode);
+ addedChildNodes.add(childNode);
+ }
+
+ public void setChildNodes(Set<DataSchemaNode> childNodes) {
+ this.childNodes = childNodes;
}
public Set<GroupingBuilder> getGroupings() {
- return groupings;
+ return addedGroupings;
}
@Override
public void addGrouping(GroupingBuilder grouping) {
- groupings.add(grouping);
+ addedGroupings.add(grouping);
+ }
+
+ public void setGroupings(final Set<GroupingDefinition> groupings) {
+ this.groupings = groupings;
}
}
*/
void addAugmentation(AugmentationSchemaBuilder augment);
+ /**
+ * Build again already built data node.
+ *
+ * In general, when Builder.build is called first time, it creates YANG data
+ * model node instance. With every other call it just return this instance
+ * without checking for properties change. This method causes that builder
+ * object process again all its properties and return an updated instance of
+ * YANG data node.
+ */
+ void rebuild();
+
}
*/
public interface Builder {
+ /**
+ * Build YANG data model node.
+ *
+ * This method should create an instance of YANG data model node. After
+ * creating an instance, this instance should be returned for each call
+ * without repeating build process.
+ *
+ * @return YANG data model node
+ */
Object build();
int getLine();
DataSchemaNode build();
+ boolean isAugmenting();
+
void setAugmenting(boolean augmenting);
+ boolean isConfiguration();
+
void setConfiguration(boolean configuration);
ConstraintsBuilder getConstraints();
\r
void setPath(SchemaPath schemaPath);\r
\r
+ String getDescription();\r
+\r
void setDescription(String description);\r
\r
+ String getReference();\r
+\r
void setReference(String reference);\r
\r
+ Status getStatus();\r
+\r
void setStatus(Status status);\r
\r
void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownNode);\r
private SchemaPath path;
private final AnyXmlSchemaNodeImpl instance;
private final ConstraintsBuilder constraints;
+
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
private String description;
instance.setAugmenting(augmenting);
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if(unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
return addedUnknownNodes;
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
public String getDescription() {
return description;
}
}
}
+ @Override
+ public boolean isAugmenting() {
+ return augmenting;
+ }
+
@Override
public void setAugmenting(final boolean augmenting) {
this.augmenting = augmenting;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
public final class ChoiceBuilder implements DataSchemaNodeBuilder, AugmentationTargetBuilder {
- private boolean built;
+ private boolean isBuilt;
private final ChoiceNodeImpl instance;
private final int line;
// SchemaNode args
private String description;
private String reference;
private Status status = Status.CURRENT;
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
// DataSchemaNode args
private boolean augmenting;
// AugmentationTarget args
private final Set<AugmentationSchemaBuilder> addedAugmentations = new HashSet<AugmentationSchemaBuilder>();
// ChoiceNode args
- private final Set<ChoiceCaseBuilder> cases = new HashSet<ChoiceCaseBuilder>();
+ private Set<ChoiceCaseNode> cases;
+ private final Set<ChoiceCaseBuilder> addedCases = new HashSet<ChoiceCaseBuilder>();
private String defaultCase;
public ChoiceBuilder(final QName qname, final int line) {
@Override
public ChoiceNode build() {
- if (!built) {
+ if (!isBuilt) {
instance.setPath(schemaPath);
instance.setDescription(description);
instance.setReference(reference);
instance.setDefaultCase(defaultCase);
// CASES
- final Set<ChoiceCaseNode> choiceCases = new HashSet<ChoiceCaseNode>();
- for (ChoiceCaseBuilder caseBuilder : cases) {
- choiceCases.add(caseBuilder.build());
+ if(cases == null) {
+ cases = new HashSet<ChoiceCaseNode>();
+ for (ChoiceCaseBuilder caseBuilder : addedCases) {
+ cases.add(caseBuilder.build());
+ }
}
- instance.setCases(choiceCases);
+ instance.setCases(cases);
// AUGMENTATIONS
final Set<AugmentationSchema> augmentations = new HashSet<AugmentationSchema>();
instance.setAvailableAugmentations(augmentations);
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if(unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
- built = true;
+ isBuilt = true;
}
return instance;
}
+ @Override
+ public void rebuild() {
+ isBuilt = false;
+ build();
+ }
+
@Override
public int getLine() {
return line;
}
public Set<ChoiceCaseBuilder> getCases() {
- return cases;
+ return addedCases;
}
public void addChildNode(DataSchemaNodeBuilder childNode) {
if (!(childNode instanceof ChoiceCaseBuilder)) {
ChoiceCaseBuilder caseBuilder = new ChoiceCaseBuilder(childNode.getQName(), childNode.getLine());
caseBuilder.addChildNode(childNode);
- cases.add(caseBuilder);
+ addedCases.add(caseBuilder);
} else {
- cases.add((ChoiceCaseBuilder) childNode);
+ addedCases.add((ChoiceCaseBuilder) childNode);
}
}
+ public void setCases(Set<ChoiceCaseNode> cases) {
+ this.cases = cases;
+ }
+
@Override
public QName getQName() {
return qname;
return constraints;
}
- public List<UnknownSchemaNodeBuilder> getUnknownNodes() {
- return addedUnknownNodes;
- }
-
public Set<AugmentationSchemaBuilder> getAugmentations() {
return addedAugmentations;
}
addedAugmentations.add(augment);
}
+ public List<UnknownSchemaNodeBuilder> getUnknownNodes() {
+ return addedUnknownNodes;
+ }
+
@Override
public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownNode) {
addedUnknownNodes.add(unknownNode);
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
public String getDefaultCase() {
return defaultCase;
}
this.defaultCase = defaultCase;
}
- private final class ChoiceNodeImpl implements ChoiceNode {
+ public final class ChoiceNodeImpl implements ChoiceNode {
private final QName qname;
private SchemaPath path;
private String description;
this.defaultCase = defaultCase;
}
+ public ChoiceBuilder toBuilder() {
+ return ChoiceBuilder.this;
+ }
+
@Override
public int hashCode() {
final int prime = 31;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
import org.opendaylight.controller.yang.model.api.UsesNode;
import org.opendaylight.controller.yang.parser.builder.api.AbstractDataNodeContainerBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.AugmentationSchemaBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.AugmentationTargetBuilder;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.TypeDefinitionBuilder;
import org.opendaylight.controller.yang.parser.builder.api.UsesNodeBuilder;
import org.opendaylight.controller.yang.parser.util.YangParseException;
public final class ChoiceCaseBuilder extends AbstractDataNodeContainerBuilder implements
- DataSchemaNodeBuilder {
+ DataSchemaNodeBuilder, AugmentationTargetBuilder {
+ private boolean isBuilt;
private final ChoiceCaseNodeImpl instance;
private final int line;
private SchemaPath schemaPath;
private boolean augmenting;
private final ConstraintsBuilder constraints;
private final Set<UsesNodeBuilder> addedUsesNodes = new HashSet<UsesNodeBuilder>();
- private final Set<AugmentationSchema> augmentations = new HashSet<AugmentationSchema>();
+ private final Set<AugmentationSchemaBuilder> addedAugmentations = new HashSet<AugmentationSchemaBuilder>();
ChoiceCaseBuilder(final QName qname, final int line) {
super(qname);
@Override
public ChoiceCaseNode build() {
- instance.setConstraints(constraints.build());
- instance.setPath(schemaPath);
- instance.setDescription(description);
- instance.setReference(reference);
- instance.setStatus(status);
- instance.setAugmenting(augmenting);
- instance.setAvailableAugmentations(augmentations);
+ if(!isBuilt) {
+ instance.setConstraints(constraints.build());
+ instance.setPath(schemaPath);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
+ instance.setAugmenting(augmenting);
+
+ // CHILD NODES
+ final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
+ childs.put(node.getQName(), node.build());
+ }
+ instance.setChildNodes(childs);
- // CHILD NODES
- final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
- for (DataSchemaNodeBuilder node : childNodes) {
- childs.put(node.getQName(), node.build());
- }
- instance.setChildNodes(childs);
+ // USES
+ final Set<UsesNode> uses = new HashSet<UsesNode>();
+ for (UsesNodeBuilder builder : addedUsesNodes) {
+ uses.add(builder.build());
+ }
+ instance.setUses(uses);
- // USES
- final Set<UsesNode> uses = new HashSet<UsesNode>();
- for (UsesNodeBuilder builder : addedUsesNodes) {
- uses.add(builder.build());
- }
- instance.setUses(uses);
+ // UNKNOWN NODES
+ final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
+ instance.setUnknownSchemaNodes(unknownNodes);
+
+ // AUGMENTATIONS
+ final Set<AugmentationSchema> augmentations = new HashSet<AugmentationSchema>();
+ for (AugmentationSchemaBuilder builder : addedAugmentations) {
+ augmentations.add(builder.build());
+ }
+ instance.setAvailableAugmentations(augmentations);
- // UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ isBuilt = true;
}
- instance.setUnknownSchemaNodes(unknownNodes);
return instance;
}
+ /**
+  * Clears the built flag and invokes build() again, so the instance is
+  * re-populated from the builder's current state.
+  */
+ @Override
+ public void rebuild() {
+ isBuilt = false;
+ build();
+ }
+
@Override
public int getLine() {
return line;
"Can not add type definition to choice case.");
}
+ /**
+  * Always false: this builder carries no configuration flag of its own
+  * (setConfiguration below rejects any attempt to set one).
+  */
+ @Override
+ public boolean isConfiguration() {
+ return false;
+ }
+
@Override
public void setConfiguration(boolean configuration) {
throw new YangParseException(line,
return constraints;
}
- public Set<AugmentationSchema> getAugmentations() {
- return augmentations;
+ /** Registers an augmentation builder; its result is added to the node in build(). */
+ @Override
+ public void addAugmentation(AugmentationSchemaBuilder augment) {
+ addedAugmentations.add(augment);
}
- private final class ChoiceCaseNodeImpl implements ChoiceCaseNode {
+ public final class ChoiceCaseNodeImpl implements ChoiceCaseNode {
private final QName qname;
private SchemaPath path;
private String description;
}
}
+ public ChoiceCaseBuilder toBuilder() {
+ return ChoiceCaseBuilder.this;
+ }
+
@Override
public int hashCode() {
final int prime = 31;
private boolean presence;
private boolean augmenting;
private boolean configuration;
-
+ private Set<TypeDefinition<?>> typedefs;
private final Set<TypeDefinitionBuilder> addedTypedefs = new HashSet<TypeDefinitionBuilder>();
+ private Set<UsesNode> usesNodes;
private final Set<UsesNodeBuilder> addedUsesNodes = new HashSet<UsesNodeBuilder>();
+ private Set<AugmentationSchema> augmentations;
private final Set<AugmentationSchemaBuilder> addedAugmentations = new HashSet<AugmentationSchemaBuilder>();
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
public ContainerSchemaNodeBuilder(final QName qname, final int line) {
// CHILD NODES
final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
- for (DataSchemaNodeBuilder node : childNodes) {
- childs.put(node.getQName(), node.build());
+ if(childNodes == null) {
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
+ childs.put(node.getQName(), node.build());
+ }
+ } else {
+ for(DataSchemaNode node : childNodes) {
+ childs.put(node.getQName(), node);
+ }
}
instance.setChildNodes(childs);
// GROUPINGS
- final Set<GroupingDefinition> groupingDefs = new HashSet<GroupingDefinition>();
- for (GroupingBuilder builder : groupings) {
- groupingDefs.add(builder.build());
+ if(groupings == null) {
+ groupings = new HashSet<GroupingDefinition>();
+ for (GroupingBuilder builder : addedGroupings) {
+ groupings.add(builder.build());
+ }
}
- instance.setGroupings(groupingDefs);
+ instance.setGroupings(groupings);
// TYPEDEFS
- final Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
- for (TypeDefinitionBuilder entry : addedTypedefs) {
- typedefs.add(entry.build());
+ if(typedefs == null) {
+ typedefs = new HashSet<TypeDefinition<?>>();
+ for (TypeDefinitionBuilder entry : addedTypedefs) {
+ typedefs.add(entry.build());
+ }
}
instance.setTypeDefinitions(typedefs);
// USES
- final Set<UsesNode> uses = new HashSet<UsesNode>();
- for (UsesNodeBuilder builder : addedUsesNodes) {
- uses.add(builder.build());
+ if(usesNodes == null) {
+ usesNodes = new HashSet<UsesNode>();
+ for (UsesNodeBuilder builder : addedUsesNodes) {
+ usesNodes.add(builder.build());
+ }
}
- instance.setUses(uses);
+ instance.setUses(usesNodes);
// AUGMENTATIONS
- final Set<AugmentationSchema> augmentations = new HashSet<AugmentationSchema>();
- for (AugmentationSchemaBuilder builder : addedAugmentations) {
- augmentations.add(builder.build());
+ if(augmentations == null) {
+ augmentations = new HashSet<AugmentationSchema>();
+ for (AugmentationSchemaBuilder builder : addedAugmentations) {
+ augmentations.add(builder.build());
+ }
}
instance.setAvailableAugmentations(augmentations);
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if(unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
return instance;
}
+ /**
+  * Clears the built flag and invokes build() again, so the instance is
+  * re-populated from the builder's current state.
+  */
+ @Override
+ public void rebuild() {
+ isBuilt = false;
+ build();
+ }
+
@Override
public int getLine() {
return line;
addedTypedefs.add(type);
}
+ public void setTypedefs(final Set<TypeDefinition<?>> typedefs) {
+ this.typedefs = typedefs;
+ }
+
public Set<AugmentationSchemaBuilder> getAugmentations() {
return addedAugmentations;
}
addedAugmentations.add(augment);
}
+ public void setAugmentations(final Set<AugmentationSchema> augmentations) {
+ this.augmentations = augmentations;
+ }
+
public SchemaPath getPath() {
return schemaPath;
}
this.schemaPath = schemaPath;
}
+ @Override
public String getDescription() {
return description;
}
this.description = description;
}
+ @Override
public String getReference() {
return reference;
}
this.reference = reference;
}
+ @Override
public Status getStatus() {
return status;
}
}
}
+ @Override
public boolean isAugmenting() {
return augmenting;
}
this.augmenting = augmenting;
}
+ @Override
public boolean isConfiguration() {
return configuration;
}
addedUsesNodes.add(usesNodeBuilder);
}
+ /**
+  * Injects pre-built uses nodes; when set, build() uses these directly and
+  * skips building addedUsesNodes.
+  * NOTE(review): "Usesnodes" casing is unconventional ("UsesNodes" expected)
+  * but matches the sibling builders in this patch, so it is kept as-is.
+  */
+ public void setUsesnodes(final Set<UsesNode> usesNodes) {
+ this.usesNodes = usesNodes;
+ }
+
public boolean isPresence() {
return presence;
}
addedUnknownNodes.add(unknownNode);
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
@Override
public String toString() {
return "container " + getQName().getLocalName();
}
- private final class ContainerSchemaNodeImpl implements ContainerSchemaNode {
+ public final class ContainerSchemaNodeImpl implements ContainerSchemaNode {
private final QName qname;
private SchemaPath path;
private String description;
}
}
+ /** Returns the enclosing builder that produced this container node. */
+ public ContainerSchemaNodeBuilder toBuilder() {
+ return ContainerSchemaNodeBuilder.this;
+ }
+
@Override
public int hashCode() {
final int prime = 31;
private final int line;
private final QName qname;
private SchemaPath schemaPath;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
private final List<UnknownSchemaNodeBuilder> addedExtensions = new ArrayList<UnknownSchemaNodeBuilder>();
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
public ExtensionDefinition build() {
if(!isBuilt) {
instance.setPath(schemaPath);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
// UNKNOWN NODES
final List<UnknownSchemaNode> extensions = new ArrayList<UnknownSchemaNode>();
}
@Override
- public void setDescription(String description) {
- instance.setDescription(description);
+ public String getDescription() {
+ return description;
}
@Override
- public void setReference(String reference) {
- instance.setReference(reference);
+ public void setDescription(final String description) {
+ this.description = description;
}
@Override
- public void setStatus(Status status) {
+ public String getReference() {
+ return reference;
+ }
+
+ @Override
+ public void setReference(final String reference) {
+ this.reference = reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
+ }
+
+ @Override
+ public void setStatus(final Status status) {
+ // Store into the builder field, not the instance: build() copies this
+ // field into the instance, so writing the instance directly would be
+ // clobbered by the field's default (Status.CURRENT) on build(). This
+ // matches the FeatureBuilder/IdentitySchemaNodeBuilder hunks of this patch.
- instance.setStatus(status);
+ this.status = status;
}
@Override
- public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownNode) {
+ public void addUnknownSchemaNode(final UnknownSchemaNodeBuilder unknownNode) {
addedUnknownNodes.add(unknownNode);
}
private final int line;
private final QName qname;
private SchemaPath schemaPath;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
FeatureBuilder(final QName qname, final int line) {
public FeatureDefinitionImpl build() {
if(!isBuilt) {
instance.setPath(schemaPath);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
// UNKNOWN NODES
final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
- instance.setDescription(description);
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
}
@Override
public void setReference(final String reference) {
- instance.setReference(reference);
+ this.reference = reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
}
@Override
public void setStatus(final Status status) {
- instance.setStatus(status);
+ this.status = status;
}
@Override
private SchemaPath schemaPath;
private String description;
private String reference;
- private Status status;
- private final Set<DataSchemaNodeBuilder> childNodes = new HashSet<DataSchemaNodeBuilder>();
- private final Set<GroupingBuilder> groupings = new HashSet<GroupingBuilder>();
+ private Status status = Status.CURRENT;
+
+ private Set<DataSchemaNode> childNodes;
+ private final Set<DataSchemaNodeBuilder> addedChildNodes = new HashSet<DataSchemaNodeBuilder>();
+
+ private Set<GroupingDefinition> groupings;
+ private final Set<GroupingBuilder> addedGroupings = new HashSet<GroupingBuilder>();
+
+ private Set<TypeDefinition<?>> typedefs;
private final Set<TypeDefinitionBuilder> addedTypedefs = new HashSet<TypeDefinitionBuilder>();
- private final Set<UsesNodeBuilder> usesNodes = new HashSet<UsesNodeBuilder>();
+
+ private Set<UsesNode> usesNodes;
+ private final Set<UsesNodeBuilder> addedUsesNodes = new HashSet<UsesNodeBuilder>();
+
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
public GroupingBuilderImpl(final QName qname, final int line) {
// CHILD NODES
final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
- for (DataSchemaNodeBuilder node : childNodes) {
- childs.put(node.getQName(), node.build());
+ if(childNodes == null) {
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
+ childs.put(node.getQName(), node.build());
+ }
+ } else {
+ for(DataSchemaNode node : childNodes) {
+ childs.put(node.getQName(), node);
+ }
}
instance.setChildNodes(childs);
// GROUPINGS
- final Set<GroupingDefinition> groupingDefs = new HashSet<GroupingDefinition>();
- for (GroupingBuilder builder : groupings) {
- groupingDefs.add(builder.build());
+ if(groupings == null) {
+ groupings = new HashSet<GroupingDefinition>();
+ for (GroupingBuilder builder : addedGroupings) {
+ groupings.add(builder.build());
+ }
}
- instance.setGroupings(groupingDefs);
+ instance.setGroupings(groupings);
// TYPEDEFS
- final Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
- for (TypeDefinitionBuilder entry : addedTypedefs) {
- typedefs.add(entry.build());
+ if(typedefs == null) {
+ typedefs = new HashSet<TypeDefinition<?>>();
+ for (TypeDefinitionBuilder entry : addedTypedefs) {
+ typedefs.add(entry.build());
+ }
}
instance.setTypeDefinitions(typedefs);
// USES
- final Set<UsesNode> usesNodeDefs = new HashSet<UsesNode>();
- for (UsesNodeBuilder builder : usesNodes) {
- usesNodeDefs.add(builder.build());
+ if(usesNodes == null) {
+ usesNodes = new HashSet<UsesNode>();
+ for (UsesNodeBuilder builder : addedUsesNodes) {
+ usesNodes.add(builder.build());
+ }
}
- instance.setUses(usesNodeDefs);
+ instance.setUses(usesNodes);
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if(unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
addedTypedefs.add(type);
}
+ public void setTypedefs(final Set<TypeDefinition<?>> typedefs) {
+ this.typedefs = typedefs;
+ }
+
@Override
public SchemaPath getPath() {
return schemaPath;
@Override
public DataSchemaNodeBuilder getChildNode(String name) {
DataSchemaNodeBuilder result = null;
- for (DataSchemaNodeBuilder node : childNodes) {
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
if (node.getQName().getLocalName().equals(name)) {
result = node;
break;
@Override
public void addChildNode(final DataSchemaNodeBuilder childNode) {
- childNodes.add(childNode);
+ addedChildNodes.add(childNode);
}
@Override
public Set<DataSchemaNodeBuilder> getChildNodes() {
- return childNodes;
+ return addedChildNodes;
+ }
+
+ public void setChildNodes(final Set<DataSchemaNode> childNodes) {
+ this.childNodes = childNodes;
}
@Override
public Set<GroupingBuilder> getGroupings() {
- return groupings;
+ return addedGroupings;
}
@Override
public void addGrouping(final GroupingBuilder grouping) {
- groupings.add(grouping);
+ addedGroupings.add(grouping);
+ }
+
+ public void setGroupings(final Set<GroupingDefinition> groupings) {
+ this.groupings = groupings;
}
@Override
public Set<UsesNodeBuilder> getUses() {
- return usesNodes;
+ return addedUsesNodes;
}
@Override
public void addUsesNode(final UsesNodeBuilder usesBuilder) {
- usesNodes.add(usesBuilder);
+ addedUsesNodes.add(usesBuilder);
+ }
+
+ public void setUsesnodes(final Set<UsesNode> usesNodes) {
+ this.usesNodes = usesNodes;
}
@Override
addedUnknownNodes.add(unknownNode);
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
+
private final class GroupingDefinitionImpl implements GroupingDefinition {
private final QName qname;
private SchemaPath path;
import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
public final class IdentitySchemaNodeBuilder implements SchemaNodeBuilder {
+ private boolean isBuilt;
private final IdentitySchemaNodeImpl instance;
private final int line;
private final QName qname;
private SchemaPath schemaPath;
- private IdentitySchemaNodeBuilder baseIdentity;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
+ private IdentitySchemaNodeBuilder baseIdentityBuilder;
+ private IdentitySchemaNode baseIdentity;
private String baseIdentityName;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
@Override
public IdentitySchemaNode build() {
- instance.setPath(schemaPath);
- if (baseIdentity != null) {
- instance.setBaseIdentity(baseIdentity.build());
- }
+ if(!isBuilt) {
+ instance.setPath(schemaPath);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
+
+ if (baseIdentity == null) {
+ if (baseIdentityBuilder != null) {
+ instance.setBaseIdentity(baseIdentityBuilder.build());
+ }
+ } else {
+ instance.setBaseIdentity(baseIdentity);
+ }
- // UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ // UNKNOWN NODES
+ final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
+ instance.setUnknownSchemaNodes(unknownNodes);
+
+ isBuilt = true;
}
- instance.setUnknownSchemaNodes(unknownNodes);
return instance;
}
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
- instance.setDescription(description);
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
}
@Override
public void setReference(final String reference) {
- instance.setReference(reference);
+ this.reference = reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
}
@Override
public void setStatus(final Status status) {
if (status != null) {
- instance.setStatus(status);
+ this.status = status;
}
}
@Override
public void addUnknownSchemaNode(final UnknownSchemaNodeBuilder unknownNode) {
-
-
addedUnknownNodes.add(unknownNode);
}
}
public void setBaseIdentity(final IdentitySchemaNodeBuilder baseType) {
+ this.baseIdentityBuilder = baseType;
+ }
+
+ public void setBaseIdentity(final IdentitySchemaNode baseType) {
this.baseIdentity = baseType;
}
return unknownNodes;
}
- private void setUnknownSchemaNodes(
- List<UnknownSchemaNode> unknownSchemaNodes) {
+ private void setUnknownSchemaNodes(List<UnknownSchemaNode> unknownSchemaNodes) {
if (unknownSchemaNodes != null) {
this.unknownNodes = unknownSchemaNodes;
}
@Override
public String toString() {
- StringBuilder sb = new StringBuilder(
- IdentitySchemaNodeImpl.class.getSimpleName());
+ StringBuilder sb = new StringBuilder(IdentitySchemaNodeImpl.class.getSimpleName());
sb.append("[");
sb.append("base=" + baseIdentity);
sb.append(", qname=" + qname);
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
import org.opendaylight.controller.yang.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
-import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
-public final class LeafListSchemaNodeBuilder extends AbstractTypeAwareBuilder
- implements SchemaNodeBuilder, DataSchemaNodeBuilder {
+public final class LeafListSchemaNodeBuilder extends AbstractTypeAwareBuilder implements DataSchemaNodeBuilder {
private boolean isBuilt;
private final LeafListSchemaNodeImpl instance;
private final int line;
private String description;
private String reference;
private Status status = Status.CURRENT;
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
// DataSchemaNode args
private boolean augmenting;
}
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if (unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
addedUnknownNodes.add(unknownNode);
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
private final class LeafListSchemaNodeImpl implements LeafListSchemaNode {
private final QName qname;
private SchemaPath path;
@Override
public String toString() {
- StringBuilder sb = new StringBuilder(
- LeafListSchemaNodeImpl.class.getSimpleName());
+ StringBuilder sb = new StringBuilder(LeafListSchemaNodeImpl.class.getSimpleName());
sb.append("[");
- sb.append("qname=" + qname);
- sb.append(", path=" + path);
- sb.append(", augmenting=" + augmenting);
- sb.append(", configuration=" + configuration);
- sb.append(", constraints=" + constraintsDef);
- sb.append(", type=" + type);
- sb.append(", userOrdered=" + userOrdered);
+ sb.append(qname);
sb.append("]");
return sb.toString();
}
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
import org.opendaylight.controller.yang.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
-import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
-public final class LeafSchemaNodeBuilder extends AbstractTypeAwareBuilder
- implements DataSchemaNodeBuilder, SchemaNodeBuilder {
+public final class LeafSchemaNodeBuilder extends AbstractTypeAwareBuilder implements DataSchemaNodeBuilder {
private boolean isBuilt;
private final LeafSchemaNodeImpl instance;
private final int line;
private String description;
private String reference;
private Status status = Status.CURRENT;
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
// DataSchemaNode args
private boolean augmenting;
}
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if (unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
return addedUnknownNodes;
}
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
public String getDescription() {
return description;
}
@Override
public String toString() {
- StringBuilder sb = new StringBuilder(
- LeafSchemaNodeImpl.class.getSimpleName());
+ StringBuilder sb = new StringBuilder(LeafSchemaNodeImpl.class.getSimpleName());
sb.append("[");
sb.append("qname=" + qname);
sb.append(", path=" + path);
private String description;
private String reference;
private Status status = Status.CURRENT;
+ private List<UnknownSchemaNode> unknownNodes;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
// DataSchemaNode args
private boolean augmenting;
private boolean configuration;
private final ConstraintsBuilder constraints;
// DataNodeContainer args
+ private Set<TypeDefinition<?>> typedefs;
private final Set<TypeDefinitionBuilder> addedTypedefs = new HashSet<TypeDefinitionBuilder>();
+ private Set<UsesNode> usesNodes;
private final Set<UsesNodeBuilder> addedUsesNodes = new HashSet<UsesNodeBuilder>();
// AugmentationTarget args
+ private Set<AugmentationSchema> augmentations;
private final Set<AugmentationSchemaBuilder> addedAugmentations = new HashSet<AugmentationSchemaBuilder>();
// ListSchemaNode args
private List<QName> keyDefinition = Collections.emptyList();
// CHILD NODES
final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
- for (DataSchemaNodeBuilder node : childNodes) {
- childs.put(node.getQName(), node.build());
+ if(childNodes == null) {
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
+ childs.put(node.getQName(), node.build());
+ }
+ } else {
+ for(DataSchemaNode node : childNodes) {
+ childs.put(node.getQName(), node);
+ }
}
instance.setChildNodes(childs);
// TYPEDEFS
- final Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
- for (TypeDefinitionBuilder entry : addedTypedefs) {
- typedefs.add(entry.build());
+ if(typedefs == null) {
+ typedefs = new HashSet<TypeDefinition<?>>();
+ for (TypeDefinitionBuilder entry : addedTypedefs) {
+ typedefs.add(entry.build());
+ }
}
instance.setTypeDefinitions(typedefs);
// USES
- final Set<UsesNode> usesNodeDefs = new HashSet<UsesNode>();
- for (UsesNodeBuilder builder : addedUsesNodes) {
- usesNodeDefs.add(builder.build());
+ if(usesNodes == null) {
+ usesNodes = new HashSet<UsesNode>();
+ for (UsesNodeBuilder builder : addedUsesNodes) {
+ usesNodes.add(builder.build());
+ }
}
- instance.setUses(usesNodeDefs);
+ instance.setUses(usesNodes);
// GROUPINGS
- final Set<GroupingDefinition> groupingDefs = new HashSet<GroupingDefinition>();
- for (GroupingBuilder builder : groupings) {
- groupingDefs.add(builder.build());
+ if(groupings == null) {
+ groupings = new HashSet<GroupingDefinition>();
+ for (GroupingBuilder builder : addedGroupings) {
+ groupings.add(builder.build());
+ }
}
- instance.setGroupings(groupingDefs);
+ instance.setGroupings(groupings);
// AUGMENTATIONS
- final Set<AugmentationSchema> augmentations = new HashSet<AugmentationSchema>();
- for (AugmentationSchemaBuilder builder : addedAugmentations) {
- augmentations.add(builder.build());
+ if(augmentations == null) {
+ augmentations = new HashSet<AugmentationSchema>();
+ for (AugmentationSchemaBuilder builder : addedAugmentations) {
+ augmentations.add(builder.build());
+ }
}
instance.setAvailableAugmentations(augmentations);
// UNKNOWN NODES
- final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
- unknownNodes.add(b.build());
+ if(unknownNodes == null) {
+ unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
}
instance.setUnknownSchemaNodes(unknownNodes);
return instance;
}
+ /**
+  * Clears the built flag and invokes build() again, so the instance is
+  * re-populated from the builder's current state.
+  */
+ @Override
+ public void rebuild() {
+ isBuilt = false;
+ build();
+ }
+
@Override
public int getLine() {
return line;
addedTypedefs.add(type);
}
+ public void setTypedefs(final Set<TypeDefinition<?>> typedefs) {
+ this.typedefs = typedefs;
+ }
+
public SchemaPath getPath() {
return schemaPath;
}
addedUsesNodes.add(usesBuilder);
}
+ public void setUsesnodes(final Set<UsesNode> usesNodes) {
+ this.usesNodes = usesNodes;
+ }
+
public Set<AugmentationSchemaBuilder> getAugmentations() {
return addedAugmentations;
}
addedAugmentations.add(augment);
}
+ public void setAugmentations(final Set<AugmentationSchema> augmentations) {
+ this.augmentations = augmentations;
+ }
+
public List<QName> getKeyDefinition() {
return keyDefinition;
}
addedUnknownNodes.add(unknownNode);
}
- private final class ListSchemaNodeImpl implements ListSchemaNode {
+ public void setUnknownNodes(List<UnknownSchemaNode> unknownNodes) {
+ this.unknownNodes = unknownNodes;
+ }
+
+
+ public final class ListSchemaNodeImpl implements ListSchemaNode {
private final QName qname;
private SchemaPath path;
private String description;
}
}
+ public ListSchemaNodeBuilder toBuilder() {
+ return ListSchemaNodeBuilder.this;
+ }
+
@Override
public int hashCode() {
final int prime = 31;
import org.opendaylight.controller.yang.model.api.RpcDefinition;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
import org.opendaylight.controller.yang.model.api.UsesNode;
import org.opendaylight.controller.yang.parser.builder.api.AugmentationSchemaBuilder;
import org.opendaylight.controller.yang.parser.builder.api.Builder;
private final Map<List<String>, TypeDefinitionBuilder> addedTypedefs = new HashMap<List<String>, TypeDefinitionBuilder>();
private final Map<List<String>, UnionTypeBuilder> addedUnionTypes = new HashMap<List<String>, UnionTypeBuilder>();
private final List<ExtensionBuilder> addedExtensions = new ArrayList<ExtensionBuilder>();
- private final Set<UnknownSchemaNodeBuilder> addedUnknownNodes = new HashSet<UnknownSchemaNodeBuilder>();
+ private final Map<List<String>, UnknownSchemaNodeBuilder> addedUnknownNodes = new HashMap<List<String>, UnknownSchemaNodeBuilder>();
private final Map<List<String>, TypeAwareBuilder> dirtyNodes = new HashMap<List<String>, TypeAwareBuilder>();
// NOTIFICATIONS
final Set<NotificationDefinition> notifications = new HashSet<NotificationDefinition>();
for (NotificationBuilder entry : addedNotifications) {
- notifications.add((NotificationDefinition) entry.build());
+ notifications.add(entry.build());
}
instance.setNotifications(notifications);
// DEVIATIONS
final Set<Deviation> deviations = new HashSet<Deviation>();
- for (Map.Entry<List<String>, DeviationBuilder> entry : addedDeviations
- .entrySet()) {
+ for (Map.Entry<List<String>, DeviationBuilder> entry : addedDeviations.entrySet()) {
deviations.add(entry.getValue().build());
}
instance.setDeviations(deviations);
}
instance.setIdentities(identities);
+ // UNKNOWN NODES
+ final List<UnknownSchemaNode> unknownNodes = buildModuleUnknownNodes(addedUnknownNodes);
+ instance.setUnknownSchemaNodes(unknownNodes);
+
return instance;
}
public Set<DataSchemaNodeBuilder> getChildNodes() {
final Set<DataSchemaNodeBuilder> children = new HashSet<DataSchemaNodeBuilder>();
- for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : childNodes
- .entrySet()) {
+ for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : childNodes.entrySet()) {
final List<String> path = entry.getKey();
final DataSchemaNodeBuilder child = entry.getValue();
if (path.size() == 2) {
}
public Set<UnknownSchemaNodeBuilder> getUnknownNodes() {
- return addedUnknownNodes;
+ return new HashSet<UnknownSchemaNodeBuilder>(addedUnknownNodes.values());
}
public Set<TypeDefinitionBuilder> getModuleTypedefs() {
final Set<TypeDefinitionBuilder> typedefs = new HashSet<TypeDefinitionBuilder>();
- for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs
- .entrySet()) {
+ for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs.entrySet()) {
if (entry.getKey().size() == 2) {
typedefs.add(entry.getValue());
}
public Set<GroupingBuilder> getModuleGroupings() {
final Set<GroupingBuilder> groupings = new HashSet<GroupingBuilder>();
- for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings
- .entrySet()) {
+ for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings.entrySet()) {
if (entry.getKey().size() == 2) {
groupings.add(entry.getValue());
}
public void addDirtyNode(final List<String> path) {
final List<String> dirtyNodePath = new ArrayList<String>(path);
- final TypeAwareBuilder nodeBuilder = (TypeAwareBuilder) actualPath
- .getFirst();
+ final TypeAwareBuilder nodeBuilder = (TypeAwareBuilder) actualPath.getFirst();
dirtyNodes.put(dirtyNodePath, nodeBuilder);
}
instance.setContact(contact);
}
- public boolean addModuleImport(final String moduleName,
- final Date revision, final String prefix) {
- final ModuleImport moduleImport = createModuleImport(moduleName,
- revision, prefix);
+ public boolean addModuleImport(final String moduleName, final Date revision, final String prefix) {
+ final ModuleImport moduleImport = createModuleImport(moduleName, revision, prefix);
return imports.add(moduleImport);
}
return builder;
}
- public ContainerSchemaNodeBuilder addContainerNode(
- final QName containerName, final List<String> parentPath,
+ public ContainerSchemaNodeBuilder addContainerNode(final QName containerName, final List<String> parentPath,
final int line) {
final List<String> pathToNode = new ArrayList<String>(parentPath);
- final ContainerSchemaNodeBuilder containerBuilder = new ContainerSchemaNodeBuilder(
- containerName, line);
+ final ContainerSchemaNodeBuilder containerBuilder = new ContainerSchemaNodeBuilder(containerName, line);
updateParent(containerBuilder, line, "container");
pathToNode.add(containerName.getLocalName());
return containerBuilder;
}
- public ListSchemaNodeBuilder addListNode(final QName listName,
- final List<String> parentPath, final int line) {
+ public ListSchemaNodeBuilder addListNode(final QName listName, final List<String> parentPath, final int line) {
final List<String> pathToNode = new ArrayList<String>(parentPath);
- final ListSchemaNodeBuilder listBuilder = new ListSchemaNodeBuilder(
- listName, line);
+ final ListSchemaNodeBuilder listBuilder = new ListSchemaNodeBuilder(listName, line);
updateParent(listBuilder, line, "list");
pathToNode.add(listName.getLocalName());
return listBuilder;
}
- public LeafSchemaNodeBuilder addLeafNode(final QName leafName,
- final List<String> parentPath, final int line) {
+ public LeafSchemaNodeBuilder addLeafNode(final QName leafName, final List<String> parentPath, final int line) {
final List<String> pathToNode = new ArrayList<String>(parentPath);
- final LeafSchemaNodeBuilder leafBuilder = new LeafSchemaNodeBuilder(
- leafName, line);
+ final LeafSchemaNodeBuilder leafBuilder = new LeafSchemaNodeBuilder(leafName, line);
updateParent(leafBuilder, line, "leaf");
pathToNode.add(leafName.getLocalName());
return leafBuilder;
}
- public LeafListSchemaNodeBuilder addLeafListNode(final QName qname,
- final List<String> parentPath, final int line) {
+ public LeafListSchemaNodeBuilder addLeafListNode(final QName qname, final List<String> parentPath, final int line) {
final List<String> pathToNode = new ArrayList<String>(parentPath);
- final LeafListSchemaNodeBuilder leafListBuilder = new LeafListSchemaNodeBuilder(
- qname, line);
+ final LeafListSchemaNodeBuilder leafListBuilder = new LeafListSchemaNodeBuilder(qname, line);
updateParent(leafListBuilder, line, "leaf-list");
pathToNode.add(qname.getLocalName());
return leafListBuilder;
}
- public GroupingBuilder addGrouping(final QName qname,
- final List<String> parentPath, final int line) {
+ public GroupingBuilder addGrouping(final QName qname, final List<String> parentPath, final int line) {
final List<String> pathToGroup = new ArrayList<String>(parentPath);
final GroupingBuilder builder = new GroupingBuilderImpl(qname, line);
if (parent instanceof DataNodeContainerBuilder) {
((DataNodeContainerBuilder) parent).addGrouping(builder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of grouping " + qname.getLocalName());
+ throw new YangParseException(name, line, "Unresolved parent of grouping " + qname.getLocalName());
}
}
return builder;
}
- public AugmentationSchemaBuilder addAugment(final String name,
- final List<String> parentPath, final int line) {
+ public AugmentationSchemaBuilder addAugment(final String name, final List<String> parentPath, final int line) {
final List<String> pathToAugment = new ArrayList<String>(parentPath);
- final AugmentationSchemaBuilder builder = new AugmentationSchemaBuilderImpl(
- name, line);
+ final AugmentationSchemaBuilder builder = new AugmentationSchemaBuilderImpl(name, line);
// augment can only be in 'module' or 'uses' statement
if (!(actualPath.isEmpty())) {
if (parent instanceof UsesNodeBuilder) {
((UsesNodeBuilder) parent).addAugment(builder);
} else {
- throw new YangParseException(this.name, line,
- "Unresolved parent of augment " + name);
+ throw new YangParseException(this.name, line, "Unresolved parent of augment " + name);
}
}
return builder;
}
- public UsesNodeBuilder addUsesNode(final String groupingPathStr,
- final List<String> parentPath, final int line) {
+ public UsesNodeBuilder addUsesNode(final String groupingPathStr, final List<String> parentPath, final int line) {
final List<String> pathToUses = new ArrayList<String>(parentPath);
- final UsesNodeBuilder usesBuilder = new UsesNodeBuilderImpl(
- groupingPathStr, line);
+ final UsesNodeBuilder usesBuilder = new UsesNodeBuilderImpl(groupingPathStr, line);
if (!(actualPath.isEmpty())) {
final Builder parent = actualPath.getFirst();
}
((DataNodeContainerBuilder) parent).addUsesNode(usesBuilder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of uses " + groupingPathStr);
+ throw new YangParseException(name, line, "Unresolved parent of uses " + groupingPathStr);
}
}
return usesBuilder;
}
- public void addRefine(final RefineHolder refine,
- final List<String> parentPath) {
+ public void addRefine(final RefineHolder refine, final List<String> parentPath) {
final List<String> path = new ArrayList<String>(parentPath);
if (actualPath.isEmpty()) {
- throw new YangParseException(name, refine.getLine(),
- "refine can be defined only in uses statement");
+ throw new YangParseException(name, refine.getLine(), "refine can be defined only in uses statement");
} else {
final Builder parent = actualPath.getFirst();
if (parent instanceof UsesNodeBuilder) {
((UsesNodeBuilder) parent).addRefine(refine);
} else {
- throw new YangParseException(name, refine.getLine(),
- "refine can be defined only in uses statement");
+ throw new YangParseException(name, refine.getLine(), "refine can be defined only in uses statement");
}
}
path.add(refine.getName());
}
- public RpcDefinitionBuilder addRpc(final QName qname,
- final List<String> parentPath, final int line) {
+ public RpcDefinitionBuilder addRpc(final QName qname, final List<String> parentPath, final int line) {
if (!(actualPath.isEmpty())) {
- throw new YangParseException(name, line,
- "rpc can be defined only in module or submodule");
+ throw new YangParseException(name, line, "rpc can be defined only in module or submodule");
}
final List<String> pathToRpc = new ArrayList<String>(parentPath);
- final RpcDefinitionBuilder rpcBuilder = new RpcDefinitionBuilder(qname,
- line);
+ final RpcDefinitionBuilder rpcBuilder = new RpcDefinitionBuilder(qname, line);
pathToRpc.add(qname.getLocalName());
addedRpcs.put(pathToRpc, rpcBuilder);
return rpcBuilder;
}
- public ContainerSchemaNodeBuilder addRpcInput(final QName inputQName,
- final int line) {
+ public ContainerSchemaNodeBuilder addRpcInput(final QName inputQName, final int line) {
final Builder parent = actualPath.getFirst();
if (!(parent instanceof RpcDefinitionBuilder)) {
- throw new YangParseException(name, line,
- "input can be defined only in rpc statement");
+ throw new YangParseException(name, line, "input can be defined only in rpc statement");
}
final RpcDefinitionBuilder rpc = (RpcDefinitionBuilder) parent;
- final ContainerSchemaNodeBuilder inputBuilder = new ContainerSchemaNodeBuilder(
- inputQName, line);
+ final ContainerSchemaNodeBuilder inputBuilder = new ContainerSchemaNodeBuilder(inputQName, line);
rpc.setInput(inputBuilder);
return inputBuilder;
}
- public ContainerSchemaNodeBuilder addRpcOutput(final QName outputQName,
- final int line) {
+ public ContainerSchemaNodeBuilder addRpcOutput(final QName outputQName, final int line) {
final Builder parent = actualPath.getFirst();
if (!(parent instanceof RpcDefinitionBuilder)) {
- throw new YangParseException(name, line,
- "output can be defined only in rpc statement");
+ throw new YangParseException(name, line, "output can be defined only in rpc statement");
}
final RpcDefinitionBuilder rpc = (RpcDefinitionBuilder) parent;
- final ContainerSchemaNodeBuilder outputBuilder = new ContainerSchemaNodeBuilder(
- outputQName, line);
+ final ContainerSchemaNodeBuilder outputBuilder = new ContainerSchemaNodeBuilder(outputQName, line);
rpc.setOutput(outputBuilder);
return outputBuilder;
}
- public NotificationBuilder addNotification(final QName notificationName,
- final List<String> parentPath, final int line) {
+ public NotificationBuilder addNotification(final QName notificationName, final List<String> parentPath,
+ final int line) {
if (!(actualPath.isEmpty())) {
- throw new YangParseException(name, line,
- "notification can be defined only in module or submodule");
+ throw new YangParseException(name, line, "notification can be defined only in module or submodule");
}
-
- final NotificationBuilder builder = new NotificationBuilder(
- notificationName, line);
+ final NotificationBuilder builder = new NotificationBuilder(notificationName, line);
final List<String> notificationPath = new ArrayList<String>(parentPath);
notificationPath.add(notificationName.getLocalName());
return builder;
}
- public FeatureBuilder addFeature(final QName featureName,
- final List<String> parentPath, final int line) {
+ public FeatureBuilder addFeature(final QName featureName, final List<String> parentPath, final int line) {
if (!(actualPath.isEmpty())) {
- throw new YangParseException(name, line,
- "feature can be defined only in module or submodule");
+ throw new YangParseException(name, line, "feature can be defined only in module or submodule");
}
final List<String> pathToFeature = new ArrayList<String>(parentPath);
return builder;
}
- public ChoiceBuilder addChoice(final QName choiceName,
- final List<String> parentPath, final int line) {
+ public ChoiceBuilder addChoice(final QName choiceName, final List<String> parentPath, final int line) {
final List<String> pathToChoice = new ArrayList<String>(parentPath);
final ChoiceBuilder builder = new ChoiceBuilder(choiceName, line);
if (!(actualPath.isEmpty())) {
- Builder parent = actualPath.getFirst();
+ final Builder parent = actualPath.getFirst();
if (parent instanceof DataNodeContainerBuilder) {
if (parent instanceof AugmentationSchemaBuilder) {
builder.setAugmenting(true);
}
((DataNodeContainerBuilder) parent).addChildNode(builder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of choice "
- + choiceName.getLocalName());
+ throw new YangParseException(name, line, "Unresolved parent of choice " + choiceName.getLocalName());
}
}
return builder;
}
- public ChoiceCaseBuilder addCase(final QName caseName,
- final List<String> parentPath, final int line) {
+ public ChoiceCaseBuilder addCase(final QName caseName, final List<String> parentPath, final int line) {
final List<String> pathToCase = new ArrayList<String>(parentPath);
final ChoiceCaseBuilder builder = new ChoiceCaseBuilder(caseName, line);
builder.setAugmenting(true);
((AugmentationSchemaBuilder) parent).addChildNode(builder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of 'case' "
- + caseName.getLocalName());
+ throw new YangParseException(name, line, "Unresolved parent of 'case' " + caseName.getLocalName());
}
}
return builder;
}
- public AnyXmlBuilder addAnyXml(final QName anyXmlName,
- final List<String> parentPath, final int line) {
+ public AnyXmlBuilder addAnyXml(final QName anyXmlName, final List<String> parentPath, final int line) {
final List<String> pathToAnyXml = new ArrayList<String>(parentPath);
final AnyXmlBuilder builder = new AnyXmlBuilder(anyXmlName, line);
updateParent(builder, line, "anyxml");
return builder;
}
- public TypeDefinitionBuilderImpl addTypedef(final QName typeDefName,
- final List<String> parentPath, final int line) {
+ public TypeDefinitionBuilderImpl addTypedef(final QName typeDefName, final List<String> parentPath, final int line) {
final List<String> pathToType = new ArrayList<String>(parentPath);
- final TypeDefinitionBuilderImpl builder = new TypeDefinitionBuilderImpl(
- typeDefName, line);
+ final TypeDefinitionBuilderImpl builder = new TypeDefinitionBuilderImpl(typeDefName, line);
if (!(actualPath.isEmpty())) {
final Builder parent = actualPath.getFirst();
if (parent instanceof TypeDefinitionAwareBuilder) {
((TypeDefinitionAwareBuilder) parent).addTypedef(builder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of typedef "
- + typeDefName.getLocalName());
+ throw new YangParseException(name, line, "Unresolved parent of typedef " + typeDefName.getLocalName());
}
}
return builder;
}
- public void setType(final TypeDefinition<?> type,
- final List<String> parentPath) {
-
+ public void setType(final TypeDefinition<?> type, final List<String> parentPath) {
if (!(actualPath.isEmpty())) {
final Builder parent = actualPath.getFirst();
if (parent instanceof TypeAwareBuilder) {
((TypeAwareBuilder) parent).setType(type);
} else {
- throw new YangParseException("Failed to set type '"
- + type.getQName().getLocalName()
+ throw new YangParseException("Failed to set type '" + type.getQName().getLocalName()
+ "'. Unknown parent node: " + parent);
}
}
}
- public UnionTypeBuilder addUnionType(final List<String> currentPath,
- final URI namespace, final Date revision, final int line) {
+ public UnionTypeBuilder addUnionType(final List<String> currentPath, final URI namespace, final Date revision,
+ final int line) {
final List<String> pathToUnion = new ArrayList<String>(currentPath);
final UnionTypeBuilder union = new UnionTypeBuilder(line);
addedUnionTypes.put(path, union);
return union;
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of union type.");
+ throw new YangParseException(name, line, "Unresolved parent of union type.");
}
}
}
- public void addIdentityrefType(final String baseString,
- final List<String> parentPath, final SchemaPath schemaPath,
+ public void addIdentityrefType(final String baseString, final List<String> parentPath, final SchemaPath schemaPath,
final int line) {
final List<String> pathToIdentityref = new ArrayList<String>(parentPath);
- final IdentityrefTypeBuilder identityref = new IdentityrefTypeBuilder(
- baseString, schemaPath, line);
+ final IdentityrefTypeBuilder identityref = new IdentityrefTypeBuilder(baseString, schemaPath, line);
if (actualPath.isEmpty()) {
throw new YangParseException(line, "identityref error");
typeParent.setTypedef(identityref);
dirtyNodes.put(pathToIdentityref, typeParent);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of identityref type.");
+ throw new YangParseException(name, line, "Unresolved parent of identityref type.");
}
}
}
- public DeviationBuilder addDeviation(final String targetPath,
- final List<String> parentPath, final int line) {
+ public DeviationBuilder addDeviation(final String targetPath, final List<String> parentPath, final int line) {
if (!(actualPath.isEmpty())) {
- throw new YangParseException(name, line,
- "deviation can be defined only in module or submodule");
+ throw new YangParseException(name, line, "deviation can be defined only in module or submodule");
}
final List<String> pathToDeviation = new ArrayList<String>(parentPath);
return builder;
}
- public IdentitySchemaNodeBuilder addIdentity(final QName qname,
- final List<String> parentPath, final int line) {
+ public IdentitySchemaNodeBuilder addIdentity(final QName qname, final List<String> parentPath, final int line) {
if (!(actualPath.isEmpty())) {
- throw new YangParseException(name, line,
- "identity can be defined only in module or submodule");
+ throw new YangParseException(name, line, "identity can be defined only in module or submodule");
}
final List<String> pathToIdentity = new ArrayList<String>(parentPath);
- final IdentitySchemaNodeBuilder builder = new IdentitySchemaNodeBuilder(
- qname, line);
+ final IdentitySchemaNodeBuilder builder = new IdentitySchemaNodeBuilder(qname, line);
pathToIdentity.add(qname.getLocalName());
addedIdentities.add(builder);
return builder;
}
- public void addConfiguration(final boolean configuration,
- final List<String> parentPath, final int line) {
+ public void addConfiguration(final boolean configuration, final List<String> parentPath, final int line) {
if (actualPath.isEmpty()) {
- throw new YangParseException(name, line,
- "Parent node of config statement not found.");
+ throw new YangParseException(name, line, "Parent node of config statement not found.");
} else {
final Builder parent = actualPath.getFirst();
if (parent instanceof DataSchemaNodeBuilder) {
- ((DataSchemaNodeBuilder) parent)
- .setConfiguration(configuration);
+ ((DataSchemaNodeBuilder) parent).setConfiguration(configuration);
} else if (parent instanceof RefineHolder) {
((RefineHolder) parent).setConfig(configuration);
} else if (parent instanceof DeviationBuilder) {
// current api
return;
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of config statement.");
+ throw new YangParseException(name, line, "Unresolved parent of config statement.");
}
}
}
- public UnknownSchemaNodeBuilder addUnknownSchemaNode(final QName qname,
- final List<String> parentPath, final int line) {
- final UnknownSchemaNodeBuilder builder = new UnknownSchemaNodeBuilder(
- qname, line);
+ public UnknownSchemaNodeBuilder addUnknownSchemaNode(final QName qname, final List<String> parentPath,
+ final int line) {
+ final UnknownSchemaNodeBuilder builder = new UnknownSchemaNodeBuilder(qname, line);
if (!(actualPath.isEmpty())) {
final Builder parent = actualPath.getFirst();
} else if (parent instanceof RefineHolder) {
((RefineHolder) parent).addUnknownSchemaNode(builder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of unknown node '"
- + qname.getLocalName() + "'");
+ throw new YangParseException(name, line, "Unresolved parent of unknown node '" + qname.getLocalName()
+ + "'");
}
}
-
- addedUnknownNodes.add(builder);
+ final List<String> unPath = new ArrayList<String>(parentPath);
+ unPath.add(qname.getLocalName());
+ addedUnknownNodes.put(unPath, builder);
return builder;
}
private Set<ModuleImport> imports = Collections.emptySet();
private Set<FeatureDefinition> features = Collections.emptySet();
private Set<TypeDefinition<?>> typeDefinitions = Collections.emptySet();
- private Set<NotificationDefinition> notifications = Collections
- .emptySet();
+ private Set<NotificationDefinition> notifications = Collections.emptySet();
private Set<AugmentationSchema> augmentations = Collections.emptySet();
private Set<RpcDefinition> rpcs = Collections.emptySet();
private Set<Deviation> deviations = Collections.emptySet();
private Map<QName, DataSchemaNode> childNodes = Collections.emptyMap();
private Set<GroupingDefinition> groupings = Collections.emptySet();
private Set<UsesNode> uses = Collections.emptySet();
- private List<ExtensionDefinition> extensionNodes = Collections
- .emptyList();
+ private List<ExtensionDefinition> extensionNodes = Collections.emptyList();
private Set<IdentitySchemaNode> identities = Collections.emptySet();
+ private List<UnknownSchemaNode> unknownNodes = Collections.emptyList();
private ModuleImpl(String name) {
this.name = name;
return extensionNodes;
}
- private void setExtensionSchemaNodes(
- List<ExtensionDefinition> extensionNodes) {
+ private void setExtensionSchemaNodes(final List<ExtensionDefinition> extensionNodes) {
if (extensionNodes != null) {
this.extensionNodes = extensionNodes;
}
return identities;
}
- private void setIdentities(Set<IdentitySchemaNode> identities) {
+ private void setIdentities(final Set<IdentitySchemaNode> identities) {
if (identities != null) {
this.identities = identities;
}
}
+ @Override
+ public List<UnknownSchemaNode> getUnknownSchemaNodes() {
+ return unknownNodes;
+ }
+
+ private void setUnknownSchemaNodes(final List<UnknownSchemaNode> unknownNodes) {
+ if (unknownNodes != null) {
+ this.unknownNodes = unknownNodes;
+ }
+ }
+
@Override
public DataSchemaNode getDataChildByName(QName name) {
return childNodes.get(name);
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result
- + ((namespace == null) ? 0 : namespace.hashCode());
+ result = prime * result + ((namespace == null) ? 0 : namespace.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
- result = prime * result
- + ((revision == null) ? 0 : revision.hashCode());
- result = prime * result
- + ((prefix == null) ? 0 : prefix.hashCode());
- result = prime * result
- + ((yangVersion == null) ? 0 : yangVersion.hashCode());
+ result = prime * result + ((revision == null) ? 0 : revision.hashCode());
+ result = prime * result + ((prefix == null) ? 0 : prefix.hashCode());
+ result = prime * result + ((yangVersion == null) ? 0 : yangVersion.hashCode());
return result;
}
@Override
public String toString() {
- StringBuilder sb = new StringBuilder(
- ModuleImpl.class.getSimpleName());
+ StringBuilder sb = new StringBuilder(ModuleImpl.class.getSimpleName());
sb.append("[");
sb.append("name=" + name);
sb.append(", namespace=" + namespace);
}
}
- private void updateParent(DataSchemaNodeBuilder nodeBuilder, int line,
- String nodeTypeName) {
+ private void updateParent(DataSchemaNodeBuilder nodeBuilder, int line, String nodeTypeName) {
if (!(actualPath.isEmpty())) {
final Builder parent = actualPath.getFirst();
if (parent instanceof DataNodeContainerBuilder) {
} else if (parent instanceof ChoiceBuilder) {
((ChoiceBuilder) parent).addChildNode(nodeBuilder);
} else {
- throw new YangParseException(name, line,
- "Unresolved parent of " + nodeTypeName + " "
- + nodeBuilder.getQName().getLocalName());
+ throw new YangParseException(name, line, "Unresolved parent of " + nodeTypeName + " "
+ + nodeBuilder.getQName().getLocalName());
}
}
}
- private ModuleImport createModuleImport(final String moduleName,
- final Date revision, final String prefix) {
- ModuleImport moduleImport = new ModuleImport() {
+ private ModuleImport createModuleImport(final String moduleName, final Date revision, final String prefix) {
+ final ModuleImport moduleImport = new ModuleImport() {
@Override
public String getModuleName() {
return moduleName;
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result
- + ((moduleName == null) ? 0 : moduleName.hashCode());
- result = prime * result
- + ((revision == null) ? 0 : revision.hashCode());
- result = prime * result
- + ((prefix == null) ? 0 : prefix.hashCode());
+ result = prime * result + ((moduleName == null) ? 0 : moduleName.hashCode());
+ result = prime * result + ((revision == null) ? 0 : revision.hashCode());
+ result = prime * result + ((prefix == null) ? 0 : prefix.hashCode());
return result;
}
@Override
public String toString() {
- return "ModuleImport[moduleName=" + moduleName + ", revision="
- + revision + ", prefix=" + prefix + "]";
+ return "ModuleImport[moduleName=" + moduleName + ", revision=" + revision + ", prefix=" + prefix + "]";
}
};
return moduleImport;
* @return map of children, where key is child QName and value is child
* itself
*/
- private Map<QName, DataSchemaNode> buildModuleChildNodes(
- Map<List<String>, DataSchemaNodeBuilder> addedChilds) {
+ private Map<QName, DataSchemaNode> buildModuleChildNodes(Map<List<String>, DataSchemaNodeBuilder> addedChilds) {
final Map<QName, DataSchemaNode> childNodes = new HashMap<QName, DataSchemaNode>();
- for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : addedChilds
- .entrySet()) {
+ for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : addedChilds.entrySet()) {
List<String> path = entry.getKey();
DataSchemaNodeBuilder child = entry.getValue();
if (path.size() == 2) {
* @param addedGroupings
* @return set of built GroupingDefinition objects
*/
- private Set<GroupingDefinition> buildModuleGroupings(
- Map<List<String>, GroupingBuilder> addedGroupings) {
+ private Set<GroupingDefinition> buildModuleGroupings(Map<List<String>, GroupingBuilder> addedGroupings) {
final Set<GroupingDefinition> groupings = new HashSet<GroupingDefinition>();
- for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings
- .entrySet()) {
+ for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings.entrySet()) {
if (entry.getKey().size() == 2) {
groupings.add(entry.getValue().build());
}
* @param addedRpcs
* @return set of built RpcDefinition objects
*/
- private Set<RpcDefinition> buildModuleRpcs(
- Map<List<String>, RpcDefinitionBuilder> addedRpcs) {
+ private Set<RpcDefinition> buildModuleRpcs(Map<List<String>, RpcDefinitionBuilder> addedRpcs) {
final Set<RpcDefinition> rpcs = new HashSet<RpcDefinition>();
RpcDefinitionBuilder builder;
- for (Map.Entry<List<String>, RpcDefinitionBuilder> entry : addedRpcs
- .entrySet()) {
+ for (Map.Entry<List<String>, RpcDefinitionBuilder> entry : addedRpcs.entrySet()) {
builder = entry.getValue();
RpcDefinition rpc = builder.build();
rpcs.add(rpc);
* @param addedTypedefs
* @return set of built module typedef statements
*/
- private Set<TypeDefinition<?>> buildModuleTypedefs(
- Map<List<String>, TypeDefinitionBuilder> addedTypedefs) {
+ private Set<TypeDefinition<?>> buildModuleTypedefs(Map<List<String>, TypeDefinitionBuilder> addedTypedefs) {
Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
- for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs
- .entrySet()) {
+ for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs.entrySet()) {
List<String> key = entry.getKey();
TypeDefinitionBuilder typedefBuilder = entry.getValue();
if (key.size() == 2) {
- TypeDefinition<? extends TypeDefinition<?>> node = typedefBuilder
- .build();
+ TypeDefinition<? extends TypeDefinition<?>> node = typedefBuilder.build();
typedefs.add(node);
}
}
* @param addedUsesNodes
* @return set of built module uses nodes
*/
- private Set<UsesNode> buildUsesNodes(
- Map<List<String>, UsesNodeBuilder> addedUsesNodes) {
+ private Set<UsesNode> buildUsesNodes(Map<List<String>, UsesNodeBuilder> addedUsesNodes) {
final Set<UsesNode> usesNodeDefs = new HashSet<UsesNode>();
- for (Map.Entry<List<String>, UsesNodeBuilder> entry : addedUsesNodes
- .entrySet()) {
+ for (Map.Entry<List<String>, UsesNodeBuilder> entry : addedUsesNodes.entrySet()) {
if (entry.getKey().size() == 2) {
usesNodeDefs.add(entry.getValue().build());
}
* @param addedFeatures
* @return set of built module features
*/
- private Set<FeatureDefinition> buildModuleFeatures(
- Map<List<String>, FeatureBuilder> addedFeatures) {
+ private Set<FeatureDefinition> buildModuleFeatures(Map<List<String>, FeatureBuilder> addedFeatures) {
Set<FeatureDefinition> features = new HashSet<FeatureDefinition>();
- for (Map.Entry<List<String>, FeatureBuilder> entry : addedFeatures
- .entrySet()) {
+ for (Map.Entry<List<String>, FeatureBuilder> entry : addedFeatures.entrySet()) {
if (entry.getKey().size() == 2) {
features.add(entry.getValue().build());
}
return features;
}
+ private List<UnknownSchemaNode> buildModuleUnknownNodes(
+ final Map<List<String>, UnknownSchemaNodeBuilder> addedUnknownNodes) {
+ final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (Map.Entry<List<String>, UnknownSchemaNodeBuilder> entry : addedUnknownNodes.entrySet()) {
+ final List<String> path = entry.getKey();
+ final UnknownSchemaNodeBuilder child = entry.getValue();
+ if (path.size() == 2) {
+ final UnknownSchemaNode node = child.build();
+ unknownNodes.add(node);
+ }
+ }
+ return unknownNodes;
+ }
+
}
import java.util.Set;
import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.AugmentationSchema;
import org.opendaylight.controller.yang.model.api.DataSchemaNode;
import org.opendaylight.controller.yang.model.api.GroupingDefinition;
import org.opendaylight.controller.yang.model.api.NotificationDefinition;
-import org.opendaylight.controller.yang.model.api.SchemaNode;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
import org.opendaylight.controller.yang.model.api.UsesNode;
import org.opendaylight.controller.yang.parser.builder.api.AbstractDataNodeContainerBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.AugmentationSchemaBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.AugmentationTargetBuilder;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.GroupingBuilder;
import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.UsesNodeBuilder;
public final class NotificationBuilder extends AbstractDataNodeContainerBuilder
- implements TypeDefinitionAwareBuilder, SchemaNodeBuilder {
+ implements TypeDefinitionAwareBuilder, SchemaNodeBuilder, AugmentationTargetBuilder {
private boolean isBuilt;
private final NotificationDefinitionImpl instance;
private final int line;
private SchemaPath schemaPath;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
private final Set<TypeDefinitionBuilder> addedTypedefs = new HashSet<TypeDefinitionBuilder>();
private final Set<UsesNodeBuilder> addedUsesNodes = new HashSet<UsesNodeBuilder>();
+ private Set<AugmentationSchema> augmentations;
+ private final Set<AugmentationSchemaBuilder> addedAugmentations = new HashSet<AugmentationSchemaBuilder>();
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
NotificationBuilder(final QName qname, final int line) {
}
@Override
- public SchemaNode build() {
+ public NotificationDefinition build() {
if (!isBuilt) {
instance.setPath(schemaPath);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
// CHILD NODES
final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
- for (DataSchemaNodeBuilder node : childNodes) {
+ for (DataSchemaNodeBuilder node : addedChildNodes) {
childs.put(node.getQName(), node.build());
}
instance.setChildNodes(childs);
// GROUPINGS
final Set<GroupingDefinition> groupingDefs = new HashSet<GroupingDefinition>();
- for (GroupingBuilder builder : groupings) {
+ for (GroupingBuilder builder : addedGroupings) {
groupingDefs.add(builder.build());
}
instance.setGroupings(groupingDefs);
}
instance.setUses(uses);
+ // AUGMENTATIONS
+ if(augmentations == null) {
+ augmentations = new HashSet<AugmentationSchema>();
+ for (AugmentationSchemaBuilder builder : addedAugmentations) {
+ augmentations.add(builder.build());
+ }
+ }
+ instance.setAvailableAugmentations(augmentations);
+
// UNKNOWN NODES
final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
return instance;
}
+ @Override
+ public void rebuild() {
+ isBuilt = false;
+ build();
+ }
+
@Override
public int getLine() {
return line;
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
- instance.setDescription(description);
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
}
@Override
public void setReference(final String reference) {
- instance.setReference(reference);
+ this.reference = reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
}
@Override
public void setStatus(final Status status) {
- instance.setStatus(status);
+ if(status != null) {
+ this.status = status;
+ }
+ }
+
+ public Set<AugmentationSchemaBuilder> getAugmentations() {
+ return addedAugmentations;
+ }
+
+ @Override
+ public void addAugmentation(AugmentationSchemaBuilder augment) {
+ addedAugmentations.add(augment);
+ }
+
+ public void setAugmentations(final Set<AugmentationSchema> augmentations) {
+ this.augmentations = augmentations;
}
@Override
return "notification " + getQName().getLocalName();
}
- private final class NotificationDefinitionImpl implements NotificationDefinition {
+ public final class NotificationDefinitionImpl implements NotificationDefinition {
private final QName qname;
private SchemaPath path;
private String description;
private Set<GroupingDefinition> groupings = Collections.emptySet();
private Set<TypeDefinition<?>> typeDefinitions = Collections.emptySet();
private Set<UsesNode> uses = Collections.emptySet();
+ private Set<AugmentationSchema> augmentations = Collections.emptySet();
private List<UnknownSchemaNode> unknownNodes = Collections.emptyList();
private NotificationDefinitionImpl(final QName qname) {
}
}
+ @Override
+ public Set<AugmentationSchema> getAvailableAugmentations() {
+ return augmentations;
+ }
+
+ private void setAvailableAugmentations(
+ Set<AugmentationSchema> augmentations) {
+ if (augmentations != null) {
+ this.augmentations = augmentations;
+ }
+ }
+
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
return unknownNodes;
return result;
}
+ public NotificationBuilder toBuilder() {
+ return NotificationBuilder.this;
+ }
+
@Override
public int hashCode() {
final int prime = 31;
private final int line;
private final QName qname;
private SchemaPath schemaPath;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
private ContainerSchemaNodeBuilder inputBuilder;
private ContainerSchemaNodeBuilder outputBuilder;
private final Set<TypeDefinitionBuilder> addedTypedefs = new HashSet<TypeDefinitionBuilder>();
@Override
public RpcDefinition build() {
if (!isBuilt) {
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
+
final ContainerSchemaNode input = inputBuilder == null ? null : inputBuilder.build();
final ContainerSchemaNode output = outputBuilder == null ? null : outputBuilder.build();
instance.setInput(input);
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
- instance.setDescription(description);
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
+ }
+
+ @Override
+ public void setReference(String reference) {
+ this.reference = reference;
}
@Override
- public void setReference(final String reference) {
- instance.setReference(reference);
+ public Status getStatus() {
+ return status;
}
@Override
public void setStatus(final Status status) {
- instance.setStatus(status);
+ if (status != null) {
+ this.status = status;
+ }
}
@Override
@Override
public int hashCode() {
- return qname.hashCode();
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((qname == null) ? 0 : qname.hashCode());
+ result = prime * result + ((schemaPath == null) ? 0 : schemaPath.hashCode());
+ return result;
}
@Override
} else if (!other.qname.equals(this.qname)) {
return false;
}
+ if (other.schemaPath == null) {
+ if (this.schemaPath != null) {
+ return false;
+ }
+ } else if (!other.schemaPath.equals(this.schemaPath)) {
+ return false;
+ }
return true;
}
private final int line;
private final QName qname;
private SchemaPath schemaPath;
+ private String description;
+ private String reference;
+ private Status status = Status.CURRENT;
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
private QName nodeType;
private String nodeParameter;
@Override
public UnknownSchemaNode build() {
- if(!isBuilt) {
+ if (!isBuilt) {
instance.setPath(schemaPath);
instance.setNodeType(nodeType);
instance.setNodeParameter(nodeParameter);
+ instance.setDescription(description);
+ instance.setReference(reference);
+ instance.setStatus(status);
// UNKNOWN NODES
final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
- instance.setDescription(description);
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
}
@Override
- public void setReference(final String reference) {
- instance.setReference(reference);
+ public void setReference(String reference) {
+ this.reference = reference;
}
@Override
- public void setStatus(final Status status) {
- instance.setStatus(status);
+ public Status getStatus() {
+ return status;
+ }
+
+ @Override
+ public void setStatus(Status status) {
+ if (status != null) {
+ this.status = status;
+ }
}
@Override
return unknownNodes;
}
- private void setUnknownSchemaNodes(
- final List<UnknownSchemaNode> unknownNodes) {
+ private void setUnknownSchemaNodes(final List<UnknownSchemaNode> unknownNodes) {
if (unknownNodes != null) {
this.unknownNodes = unknownNodes;
}
if (module.getName().equals(name)) {
return module;
}
- } else if (module.getName().equals(name)
- && module.getRevision().equals(revision)) {
+ } else if (module.getName().equals(name) && module.getRevision().equals(revision)) {
return module;
}
}
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.parser.impl;
+
+import org.antlr.v4.runtime.BaseErrorListener;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.Recognizer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ANTLR error listener that routes YANG grammar syntax errors to SLF4J
+ * instead of ANTLR's default ConsoleErrorListener (which prints to stderr).
+ * Registered on the parser via addErrorListener after removeErrorListeners().
+ */
+final class YangErrorListener extends BaseErrorListener {
+    // Conventional modifier order is "static final" (was "final static").
+    private static final Logger logger = LoggerFactory.getLogger(YangErrorListener.class);
+
+    @Override
+    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine,
+            String msg, RecognitionException e) {
+        // Parameterized logging: no string concatenation when WARN is disabled.
+        logger.warn("line {}:{} {}", line, charPositionInLine, msg);
+    }
+}
*/
package org.opendaylight.controller.yang.parser.impl;
+import static org.opendaylight.controller.yang.parser.util.ParserUtils.*;
+
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.opendaylight.controller.antlrv4.code.gen.YangLexer;
import org.opendaylight.controller.antlrv4.code.gen.YangParser;
import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
+import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;
import org.opendaylight.controller.yang.model.api.Module;
-import org.opendaylight.controller.yang.model.api.ModuleImport;
import org.opendaylight.controller.yang.model.api.SchemaContext;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import org.opendaylight.controller.yang.model.api.type.BinaryTypeDefinition;
-import org.opendaylight.controller.yang.model.api.type.DecimalTypeDefinition;
-import org.opendaylight.controller.yang.model.api.type.IntegerTypeDefinition;
-import org.opendaylight.controller.yang.model.api.type.StringTypeDefinition;
import org.opendaylight.controller.yang.model.parser.api.YangModelParser;
import org.opendaylight.controller.yang.model.util.ExtendedType;
import org.opendaylight.controller.yang.model.util.IdentityrefType;
import org.opendaylight.controller.yang.model.util.UnknownType;
import org.opendaylight.controller.yang.parser.builder.api.AugmentationSchemaBuilder;
-import org.opendaylight.controller.yang.parser.builder.api.AugmentationTargetBuilder;
import org.opendaylight.controller.yang.parser.builder.api.Builder;
import org.opendaylight.controller.yang.parser.builder.api.DataNodeContainerBuilder;
-import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.GroupingBuilder;
import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.TypeAwareBuilder;
import org.opendaylight.controller.yang.parser.builder.api.TypeDefinitionBuilder;
import org.opendaylight.controller.yang.parser.builder.api.UsesNodeBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.AnyXmlBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.ChoiceBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.ContainerSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.IdentitySchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.IdentityrefTypeBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.LeafListSchemaNodeBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.LeafSchemaNodeBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.ListSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.ModuleBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.RpcDefinitionBuilder;
-import org.opendaylight.controller.yang.parser.builder.impl.TypeDefinitionBuilderImpl;
import org.opendaylight.controller.yang.parser.builder.impl.UnionTypeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.UnknownSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.util.ModuleDependencySort;
-import org.opendaylight.controller.yang.parser.util.ParserUtils;
import org.opendaylight.controller.yang.parser.util.RefineHolder;
+import org.opendaylight.controller.yang.parser.util.RefineUtils;
import org.opendaylight.controller.yang.parser.util.TypeConstraints;
import org.opendaylight.controller.yang.parser.util.YangParseException;
import org.opendaylight.controller.yang.validator.YangModelBasicValidator;
private static final Logger logger = LoggerFactory.getLogger(YangParserImpl.class);
 @Override
- public Map<File, Module> parseYangModelsMapped(List<File> yangFiles) {
+ // Parses the given YANG files and returns the resulting modules in
+ // dependency (insertion) order; delegates to parseYangModelsMapped and
+ // discards the File keys.
+ public Set<Module> parseYangModels(final List<File> yangFiles) {
+ return Sets.newLinkedHashSet(parseYangModelsMapped(yangFiles).values());
+ }
+
+ @Override
+ public Set<Module> parseYangModels(final List<File> yangFiles, final SchemaContext context) {
if (yangFiles != null) {
final Map<InputStream, File> inputStreams = Maps.newHashMap();
final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuilders(
Lists.newArrayList(inputStreams.keySet()), builderToStreamMap);
- // return new LinkedHashSet<Module>(build(modules).values());
+
+ for (InputStream is : inputStreams.keySet()) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ logger.debug("Failed to close stream.");
+ }
+ }
+
+ return new LinkedHashSet<Module>(buildWithContext(modules, context).values());
+ }
+ return Collections.emptySet();
+ }
+
+ @Override
+ // Stream-based counterpart of parseYangModels(List<File>).
+ // NOTE(review): this uses an unordered HashSet while the File overload uses
+ // newLinkedHashSet — confirm whether dependency order matters to callers here.
+ public Set<Module> parseYangModelsFromStreams(final List<InputStream> yangModelStreams) {
+ return Sets.newHashSet(parseYangModelsFromStreamsMapped(yangModelStreams).values());
+ }
+
+ @Override
+ // Parses YANG from streams, resolving cross-module references against the
+ // already-built modules in the supplied SchemaContext. Returns an empty set
+ // when the stream list is null.
+ // NOTE(review): unlike parseYangModels(List<File>, SchemaContext) the input
+ // streams are not closed here — confirm the caller owns stream lifecycle.
+ public Set<Module> parseYangModelsFromStreams(final List<InputStream> yangModelStreams, SchemaContext context) {
+ if (yangModelStreams != null) {
+ Map<ModuleBuilder, InputStream> builderToStreamMap = Maps.newHashMap();
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersWithContext(
+ yangModelStreams, builderToStreamMap, context);
+ return new LinkedHashSet<Module>(buildWithContext(modules, context).values());
+ }
+ return Collections.emptySet();
+ }
+
+ @Override
+ public Map<File, Module> parseYangModelsMapped(List<File> yangFiles) {
+ if (yangFiles != null) {
+ final Map<InputStream, File> inputStreams = Maps.newHashMap();
+
+ for (final File yangFile : yangFiles) {
+ try {
+ inputStreams.put(new FileInputStream(yangFile), yangFile);
+ } catch (FileNotFoundException e) {
+ logger.warn("Exception while reading yang file: " + yangFile.getName(), e);
+ }
+ }
+
+ Map<ModuleBuilder, InputStream> builderToStreamMap = Maps.newHashMap();
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuilders(
+ Lists.newArrayList(inputStreams.keySet()), builderToStreamMap);
+
+ for (InputStream is : inputStreams.keySet()) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ logger.debug("Failed to close stream.");
+ }
+ }
Map<File, Module> retVal = Maps.newLinkedHashMap();
Map<ModuleBuilder, Module> builderToModuleMap = build(modules);
return Collections.emptyMap();
}
- @Override
- public Set<Module> parseYangModels(final List<File> yangFiles) {
- return Sets.newLinkedHashSet(parseYangModelsMapped(yangFiles).values());
- }
-
- @Override
- public Set<Module> parseYangModelsFromStreams(final List<InputStream> yangModelStreams) {
- return Sets.newHashSet(parseYangModelsFromStreamsMapped(yangModelStreams).values());
- }
-
@Override
public Map<InputStream, Module> parseYangModelsFromStreamsMapped(final List<InputStream> yangModelStreams) {
Map<ModuleBuilder, InputStream> builderToStreamMap = Maps.newHashMap();
 private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuilders(final List<InputStream> yangFileStreams,
 Map<ModuleBuilder, InputStream> streamToBuilderMap) {
+ // Null context means "no pre-resolved modules": plain dependency sort is used.
+ return resolveModuleBuildersWithContext(yangFileStreams, streamToBuilderMap, null);
+ }
+ private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuildersWithContext(
+ final List<InputStream> yangFileStreams, final Map<ModuleBuilder, InputStream> streamToBuilderMap,
+ final SchemaContext context) {
final ModuleBuilder[] builders = parseModuleBuilders(yangFileStreams, streamToBuilderMap);
// Linked Hash Map MUST be used because Linked Hash Map preserves ORDER
final LinkedHashMap<String, TreeMap<Date, ModuleBuilder>> modules = new LinkedHashMap<String, TreeMap<Date, ModuleBuilder>>();
// module dependency graph sorted
- List<ModuleBuilder> sorted = ModuleDependencySort.sort(builders);
+ List<ModuleBuilder> sorted = null;
+ if (context == null) {
+ sorted = ModuleDependencySort.sort(builders);
+ } else {
+ sorted = ModuleDependencySort.sortWithContext(context, builders);
+ }
- for (ModuleBuilder builder : sorted) {
+ for (final ModuleBuilder builder : sorted) {
+ if (builder == null) {
+ continue;
+ }
final String builderName = builder.getName();
Date builderRevision = builder.getRevision();
if (builderRevision == null) {
final YangLexer lexer = new YangLexer(input);
final CommonTokenStream tokens = new CommonTokenStream(lexer);
final YangParser parser = new YangParser(tokens);
+ parser.removeErrorListeners();
+ parser.addErrorListener(new YangErrorListener());
result = parser.yang();
} catch (IOException e) {
return result;
}
+ /**
+ * Resolves all unresolved references (types, identities, uses/refine,
+ * unknown nodes, augments) against both the in-flight module builders and
+ * the already-built SchemaContext, then builds every module. Modules are
+ * keyed name -> (revision -> builder); the result preserves build order.
+ */
+ private Map<ModuleBuilder, Module> buildWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ SchemaContext context) {
+ // fix unresolved nodes
+ for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules.entrySet()) {
+ for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue().entrySet()) {
+ final ModuleBuilder moduleBuilder = childEntry.getValue();
+ fixUnresolvedNodesWithContext(modules, moduleBuilder, context);
+ }
+ }
+ resolveAugmentsWithContext(modules, context);
+
+ // build
+ // LinkedHashMap MUST be used otherwise the values will not maintain
+ // order!
+ // http://docs.oracle.com/javase/6/docs/api/java/util/LinkedHashMap.html
+ final Map<ModuleBuilder, Module> result = new LinkedHashMap<ModuleBuilder, Module>();
+ for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules.entrySet()) {
+ // NOTE(review): modulesByRevision is populated but never read in this
+ // method — confirm whether it can be removed.
+ final Map<Date, Module> modulesByRevision = new HashMap<Date, Module>();
+ for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue().entrySet()) {
+ final ModuleBuilder moduleBuilder = childEntry.getValue();
+ final Module module = moduleBuilder.build();
+ modulesByRevision.put(childEntry.getKey(), module);
+ result.put(moduleBuilder, module);
+ }
+ }
+ return result;
+ }
+
private void fixUnresolvedNodes(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder) {
resolveDirtyNodes(modules, builder);
resolveIdentities(modules, builder);
- resolveUsesRefines(modules, builder);
+ resolveUsesRefine(modules, builder);
resolveUnknownNodes(modules, builder);
}
+ // Context-aware variant of fixUnresolvedNodes: resolves dirty (unknown-typed)
+ // nodes, identities, uses/refine statements and unknown nodes of one module,
+ // consulting the SchemaContext when the target module is not among the
+ // builders currently being parsed.
+ private void fixUnresolvedNodesWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder builder, final SchemaContext context) {
+ resolveDirtyNodesWithContext(modules, builder, context);
+ resolveIdentitiesWithContext(modules, builder, context);
+ resolveUsesRefineWithContext(modules, builder, context);
+ resolveUnknownNodesWithContext(modules, builder, context);
+ }
+
/**
* Search for dirty nodes (node which contains UnknownType) and resolve
* unknown types.
}
}
+ /**
+ * Resolves every "dirty" node of the module (a node whose type is still an
+ * UnknownType), using the SchemaContext as a fallback source of types.
+ * Unions and identityrefs need special handling; all other nodes go through
+ * resolveTypeWithContext.
+ */
+ private void resolveDirtyNodesWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, SchemaContext context) {
+ final Map<List<String>, TypeAwareBuilder> dirtyNodes = module.getDirtyNodes();
+ if (!dirtyNodes.isEmpty()) {
+ for (Map.Entry<List<String>, TypeAwareBuilder> entry : dirtyNodes.entrySet()) {
+ final TypeAwareBuilder nodeToResolve = entry.getValue();
+
+ if (nodeToResolve instanceof UnionTypeBuilder) {
+ // special handling for union types
+ resolveTypeUnionWithContext((UnionTypeBuilder) nodeToResolve, modules, module, context);
+ } else if (nodeToResolve.getTypedef() instanceof IdentityrefTypeBuilder) {
+ // special handling for identityref types
+ IdentityrefTypeBuilder idref = (IdentityrefTypeBuilder) nodeToResolve.getTypedef();
+ nodeToResolve.setType(new IdentityrefType(findFullQName(modules, module, idref), idref.getPath()));
+ } else {
+ resolveTypeWithContext(nodeToResolve, modules, module, context);
+ }
+ }
+ }
+ }
+
+ /**
+ * Resolve unknown type of node. It is assumed that type of node is either
+ * UnknownType or ExtendedType with UnknownType as base type.
+ *
+ * @param nodeToResolve
+ * node with type to resolve
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ */
private void resolveType(final TypeAwareBuilder nodeToResolve,
- final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder) {
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
TypeDefinitionBuilder resolvedType = null;
final int line = nodeToResolve.getLine();
final TypeDefinition<?> nodeToResolveType = nodeToResolve.getType();
final QName unknownTypeQName = nodeToResolveType.getBaseType().getQName();
- final ModuleBuilder dependentModule = findDependentModule(modules, builder, unknownTypeQName.getPrefix(), line);
+ final ModuleBuilder dependentModule = findDependentModuleBuilder(modules, module, unknownTypeQName.getPrefix(),
+ line);
final TypeDefinitionBuilder targetTypeBuilder = findTypeDefinitionBuilder(nodeToResolve.getPath(),
- dependentModule, unknownTypeQName.getLocalName(), builder.getName(), line);
+ dependentModule, unknownTypeQName.getLocalName(), module.getName(), line);
if (nodeToResolveType instanceof ExtendedType) {
final ExtendedType extType = (ExtendedType) nodeToResolveType;
- final TypeDefinitionBuilder newType = extendedTypeWithNewBaseType(nodeToResolve, targetTypeBuilder,
- extType, modules, builder);
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseTypeBuilder(targetTypeBuilder, extType,
+ modules, module, nodeToResolve.getLine());
resolvedType = newType;
} else {
resolvedType = targetTypeBuilder;
}
// validate constraints
- final TypeConstraints constraints = findConstraints(nodeToResolve, new TypeConstraints(builder.getName(),
- nodeToResolve.getLine()), modules, builder);
+ final TypeConstraints constraints = findConstraintsFromTypeBuilder(nodeToResolve,
+ new TypeConstraints(module.getName(), nodeToResolve.getLine()), modules, module, null);
constraints.validateConstraints();
nodeToResolve.setTypedef(resolvedType);
}
+ /**
+ * Resolve unknown type of node. It is assumed that type of node is either
+ * UnknownType or ExtendedType with UnknownType as base type.
+ *
+ * @param nodeToResolve
+ * node with type to resolve
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @param context
+ * SchemaContext containing already resolved modules
+ */
+ private void resolveTypeWithContext(final TypeAwareBuilder nodeToResolve,
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module,
+ final SchemaContext context) {
+ TypeDefinitionBuilder resolvedType = null;
+ final int line = nodeToResolve.getLine();
+ final TypeDefinition<?> nodeToResolveType = nodeToResolve.getType();
+ final QName unknownTypeQName = nodeToResolveType.getBaseType().getQName();
+ final ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, module,
+ unknownTypeQName.getPrefix(), line);
+
+ // No builder for the prefix: the referenced type lives in an already-built
+ // module taken from the supplied SchemaContext.
+ if (dependentModuleBuilder == null) {
+ final Module dependentModule = findModuleFromContext(context, module, unknownTypeQName.getPrefix(), line);
+ final Set<TypeDefinition<?>> types = dependentModule.getTypeDefinitions();
+ final TypeDefinition<?> type = findTypeByName(types, unknownTypeQName.getLocalName());
+
+ if (nodeToResolveType instanceof ExtendedType) {
+ final ExtendedType extType = (ExtendedType) nodeToResolveType;
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseType(type, extType, module,
+ nodeToResolve.getLine());
+
+ nodeToResolve.setTypedef(newType);
+ } else {
+ // Copy accumulated constraints onto the typedef before wiring in
+ // the already-built base type.
+ if(nodeToResolve instanceof TypeDefinitionBuilder) {
+ TypeDefinitionBuilder tdb = (TypeDefinitionBuilder)nodeToResolve;
+ TypeConstraints tc = findConstraintsFromTypeBuilder(nodeToResolve, new TypeConstraints(module.getName(), nodeToResolve.getLine()), modules, module, context);
+ tdb.setLengths(tc.getLength());
+ tdb.setPatterns(tc.getPatterns());
+ tdb.setRanges(tc.getRange());
+ tdb.setFractionDigits(tc.getFractionDigits());
+ }
+ nodeToResolve.setType(type);
+ }
+
+ } else {
+ // The referenced type lives in a module that is itself still being
+ // built: resolve against its TypeDefinitionBuilder instead.
+ final TypeDefinitionBuilder targetTypeBuilder = findTypeDefinitionBuilder(nodeToResolve.getPath(),
+ dependentModuleBuilder, unknownTypeQName.getLocalName(), module.getName(), line);
+
+ if (nodeToResolveType instanceof ExtendedType) {
+ final ExtendedType extType = (ExtendedType) nodeToResolveType;
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseTypeBuilder(targetTypeBuilder, extType,
+ modules, module, nodeToResolve.getLine());
+ resolvedType = newType;
+ } else {
+ resolvedType = targetTypeBuilder;
+ }
+
+ // validate constraints
+ final TypeConstraints constraints = findConstraintsFromTypeBuilder(nodeToResolve, new TypeConstraints(
+ module.getName(), nodeToResolve.getLine()), modules, module, context);
+ constraints.validateConstraints();
+
+ nodeToResolve.setTypedef(resolvedType);
+ }
+ }
+
private void resolveTypeUnion(final UnionTypeBuilder union,
final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder) {
for (TypeDefinition<?> unionType : unionTypes) {
if (unionType instanceof UnknownType) {
final UnknownType ut = (UnknownType) unionType;
- final ModuleBuilder dependentModule = findDependentModule(modules, builder, ut.getQName().getPrefix(),
- union.getLine());
+ final ModuleBuilder dependentModule = findDependentModuleBuilder(modules, builder, ut.getQName()
+ .getPrefix(), union.getLine());
final TypeDefinitionBuilder resolvedType = findTypeDefinitionBuilder(union.getPath(), dependentModule,
ut.getQName().getLocalName(), builder.getName(), union.getLine());
union.setTypedef(resolvedType);
toRemove.add(ut);
} else if (unionType instanceof ExtendedType) {
final ExtendedType extType = (ExtendedType) unionType;
- TypeDefinition<?> extTypeBase = extType.getBaseType();
+ final TypeDefinition<?> extTypeBase = extType.getBaseType();
if (extTypeBase instanceof UnknownType) {
final UnknownType ut = (UnknownType) extTypeBase;
- final ModuleBuilder dependentModule = findDependentModule(modules, builder, ut.getQName()
+ final ModuleBuilder dependentModule = findDependentModuleBuilder(modules, builder, ut.getQName()
.getPrefix(), union.getLine());
final TypeDefinitionBuilder targetTypeBuilder = findTypeDefinitionBuilder(union.getPath(),
dependentModule, ut.getQName().getLocalName(), builder.getName(), union.getLine());
- final TypeDefinitionBuilder newType = extendedTypeWithNewBaseType(targetTypeBuilder,
- targetTypeBuilder, extType, modules, builder);
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseTypeBuilder(targetTypeBuilder,
+ extType, modules, builder, union.getLine());
union.setTypedef(newType);
toRemove.add(extType);
unionTypes.removeAll(toRemove);
}
- private TypeDefinitionBuilder extendedTypeWithNewBaseType(final TypeAwareBuilder nodeToResolve,
- final TypeDefinitionBuilder newBaseType, final ExtendedType oldExtendedType,
- final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder) {
- final TypeConstraints constraints = findConstraints(nodeToResolve, new TypeConstraints(builder.getName(),
- nodeToResolve.getLine()), modules, builder);
- final TypeDefinitionBuilderImpl newType = new TypeDefinitionBuilderImpl(oldExtendedType.getQName(),
- nodeToResolve.getLine());
- newType.setTypedef(newBaseType);
- newType.setPath(oldExtendedType.getPath());
- newType.setDescription(oldExtendedType.getDescription());
- newType.setReference(oldExtendedType.getReference());
- newType.setStatus(oldExtendedType.getStatus());
- newType.setLengths(constraints.getLength());
- newType.setPatterns(constraints.getPatterns());
- newType.setRanges(constraints.getRange());
- newType.setFractionDigits(constraints.getFractionDigits());
- newType.setUnits(oldExtendedType.getUnits());
- newType.setDefaultValue(oldExtendedType.getDefaultValue());
- newType.setUnknownNodes(oldExtendedType.getUnknownSchemaNodes());
- return newType;
- }
-
- private TypeConstraints findConstraints(final TypeAwareBuilder nodeToResolve, final TypeConstraints constraints,
- final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder) {
-
- // union type cannot be restricted
- if (nodeToResolve instanceof UnionTypeBuilder) {
- return constraints;
- }
-
- if (nodeToResolve instanceof TypeDefinitionBuilder) {
- TypeDefinitionBuilder typedefToResolve = (TypeDefinitionBuilder) nodeToResolve;
- constraints.addFractionDigits(typedefToResolve.getFractionDigits());
- constraints.addLengths(typedefToResolve.getLengths());
- constraints.addPatterns(typedefToResolve.getPatterns());
- constraints.addRanges(typedefToResolve.getRanges());
- }
+ private void resolveTypeUnionWithContext(final UnionTypeBuilder union,
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder builder,
+ final SchemaContext context) {
- TypeDefinition<?> type = nodeToResolve.getType();
- if (type == null) {
- return findConstraints(nodeToResolve.getTypedef(), constraints, modules, builder);
- } else {
- if (type instanceof UnknownType) {
- ModuleBuilder dependentModule = findDependentModule(modules, builder, type.getQName().getPrefix(),
- nodeToResolve.getLine());
- TypeDefinitionBuilder tdb = findTypeDefinitionBuilder(nodeToResolve.getPath(), dependentModule, type
- .getQName().getLocalName(), builder.getName(), nodeToResolve.getLine());
- return findConstraints(tdb, constraints, modules, dependentModule);
- } else if (type instanceof ExtendedType) {
- ExtendedType extType = (ExtendedType) type;
- constraints.addFractionDigits(extType.getFractionDigits());
- constraints.addLengths(extType.getLengths());
- constraints.addPatterns(extType.getPatterns());
- constraints.addRanges(extType.getRanges());
-
- TypeDefinition<?> base = extType.getBaseType();
- if (base instanceof UnknownType) {
- ModuleBuilder dependentModule = findDependentModule(modules, builder, base.getQName().getPrefix(),
- nodeToResolve.getLine());
- TypeDefinitionBuilder tdb = findTypeDefinitionBuilder(nodeToResolve.getPath(), dependentModule,
- base.getQName().getLocalName(), builder.getName(), nodeToResolve.getLine());
- return findConstraints(tdb, constraints, modules, dependentModule);
+ final List<TypeDefinition<?>> unionTypes = union.getTypes();
+ final List<TypeDefinition<?>> toRemove = new ArrayList<TypeDefinition<?>>();
+ for (TypeDefinition<?> unionType : unionTypes) {
+ if (unionType instanceof UnknownType) {
+ final UnknownType ut = (UnknownType) unionType;
+ final QName utQName = ut.getQName();
+ final ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, builder,
+ utQName.getPrefix(), union.getLine());
+
+ if (dependentModuleBuilder == null) {
+ Module dependentModule = findModuleFromContext(context, builder, utQName.getPrefix(),
+ union.getLine());
+ Set<TypeDefinition<?>> types = dependentModule.getTypeDefinitions();
+ TypeDefinition<?> type = findTypeByName(types, utQName.getLocalName());
+ union.setType(type);
+ toRemove.add(ut);
} else {
- // it has to be base yang type
- mergeConstraints(type, constraints);
- return constraints;
+ final TypeDefinitionBuilder resolvedType = findTypeDefinitionBuilder(union.getPath(),
+ dependentModuleBuilder, utQName.getLocalName(), builder.getName(), union.getLine());
+ union.setTypedef(resolvedType);
+ toRemove.add(ut);
+ }
+
+ } else if (unionType instanceof ExtendedType) {
+ final ExtendedType extType = (ExtendedType) unionType;
+ TypeDefinition<?> extTypeBase = extType.getBaseType();
+ if (extTypeBase instanceof UnknownType) {
+ final UnknownType ut = (UnknownType) extTypeBase;
+ final QName utQName = ut.getQName();
+ final ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, builder,
+ utQName.getPrefix(), union.getLine());
+
+ if (dependentModuleBuilder == null) {
+ final Module dependentModule = findModuleFromContext(context, builder, utQName.getPrefix(),
+ union.getLine());
+ Set<TypeDefinition<?>> types = dependentModule.getTypeDefinitions();
+ TypeDefinition<?> type = findTypeByName(types, utQName.getLocalName());
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseType(type, extType, builder, 0);
+
+ union.setTypedef(newType);
+ toRemove.add(extType);
+ } else {
+ final TypeDefinitionBuilder targetTypeBuilder = findTypeDefinitionBuilder(union.getPath(),
+ dependentModuleBuilder, utQName.getLocalName(), builder.getName(), union.getLine());
+
+ final TypeDefinitionBuilder newType = extendedTypeWithNewBaseTypeBuilder(targetTypeBuilder,
+ extType, modules, builder, union.getLine());
+
+ union.setTypedef(newType);
+ toRemove.add(extType);
+ }
}
- } else {
- // it is base yang type
- mergeConstraints(type, constraints);
- return constraints;
}
}
+ unionTypes.removeAll(toRemove);
}
/**
- * Search for type definition builder by name.
+ * Go through all augment definitions and resolve them. It is expected that
+ * modules are already sorted by their dependencies. This method also finds
+ * augment target node and add child nodes to it.
*
- * @param dirtyNodeSchemaPath
- * schema path of node which contains unresolved type
- * @param dependentModule
- * module which should contains referenced type
- * @param typeName
- * name of type definition
- * @param currentModuleName
- * name of current module
- * @param line
- * current line in yang model
- * @return
+ * @param modules
+ * all available modules
*/
- private TypeDefinitionBuilder findTypeDefinitionBuilder(SchemaPath dirtyNodeSchemaPath,
- final ModuleBuilder dependentModule, final String typeName, final String currentModuleName, final int line) {
- final List<QName> path = dirtyNodeSchemaPath.getPath();
- TypeDefinitionBuilder result = null;
-
- Set<TypeDefinitionBuilder> typedefs = dependentModule.getModuleTypedefs();
- result = findTdb(typedefs, typeName);
-
- if (result == null) {
- Builder currentNode = null;
- final List<String> currentPath = new ArrayList<String>();
- currentPath.add(dependentModule.getName());
-
- for (int i = 0; i < path.size(); i++) {
- QName qname = path.get(i);
- currentPath.add(qname.getLocalName());
- currentNode = dependentModule.getModuleNode(currentPath);
-
- if (currentNode instanceof RpcDefinitionBuilder) {
- typedefs = ((RpcDefinitionBuilder) currentNode).getTypeDefinitions();
- } else if (currentNode instanceof DataNodeContainerBuilder) {
- typedefs = ((DataNodeContainerBuilder) currentNode).getTypeDefinitions();
- } else {
- typedefs = Collections.emptySet();
- }
-
- result = findTdb(typedefs, typeName);
- if (result != null) {
- break;
- }
+ private void resolveAugments(final Map<String, TreeMap<Date, ModuleBuilder>> modules) {
+ final List<ModuleBuilder> allModulesList = new ArrayList<ModuleBuilder>();
+ final Set<ModuleBuilder> allModulesSet = new HashSet<ModuleBuilder>();
+ for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules.entrySet()) {
+ for (Map.Entry<Date, ModuleBuilder> inner : entry.getValue().entrySet()) {
+ allModulesList.add(inner.getValue());
+ allModulesSet.add(inner.getValue());
}
}
- if (result != null) {
- return result;
- }
- throw new YangParseException(currentModuleName, line, "Referenced type '" + typeName + "' not found.");
- }
-
- private TypeDefinitionBuilder findTdb(Set<TypeDefinitionBuilder> types, String name) {
- for (TypeDefinitionBuilder td : types) {
- if (td.getQName().getLocalName().equals(name)) {
- return td;
+ for (int i = 0; i < allModulesList.size(); i++) {
+ final ModuleBuilder module = allModulesList.get(i);
+ // try to resolve augments in module
+ resolveAugment(modules, module);
+ // while all augments are not resolved
+ final Iterator<ModuleBuilder> allModulesIterator = allModulesSet.iterator();
+ while (!(module.getAugmentsResolved() == module.getAugments().size())) {
+ ModuleBuilder nextModule = null;
+ // try resolve other module augments
+ try {
+ nextModule = allModulesIterator.next();
+ resolveAugment(modules, nextModule);
+ } catch (NoSuchElementException e) {
+ throw new YangParseException("Failed to resolve augments in module '" + module.getName() + "'.", e);
+ }
+ // then try to resolve first module again
+ resolveAugment(modules, module);
}
}
- return null;
}
/**
- * Pull restriction from referenced type and add them to given constraints
+ * Tries to resolve augments in the given module. If the augment target node
+ * is not found, nothing is done.
*
- * @param referencedType
- * @param constraints
+ * @param modules
+ * all available modules
+ * @param module
+ * current module
*/
- private void mergeConstraints(final TypeDefinition<?> referencedType, final TypeConstraints constraints) {
+ private void resolveAugment(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
+ if (module.getAugmentsResolved() < module.getAugments().size()) {
+ for (AugmentationSchemaBuilder augmentBuilder : module.getAugments()) {
+
+ if (!augmentBuilder.isResolved()) {
+ final SchemaPath augmentTargetSchemaPath = augmentBuilder.getTargetPath();
+ final List<QName> path = augmentTargetSchemaPath.getPath();
+
+ final QName qname = path.get(0);
+ String prefix = qname.getPrefix();
+ if (prefix == null) {
+ prefix = module.getPrefix();
+ }
- if (referencedType instanceof DecimalTypeDefinition) {
- constraints.addRanges(((DecimalTypeDefinition) referencedType).getRangeStatements());
- constraints.addFractionDigits(((DecimalTypeDefinition) referencedType).getFractionDigits());
- } else if (referencedType instanceof IntegerTypeDefinition) {
- constraints.addRanges(((IntegerTypeDefinition) referencedType).getRangeStatements());
- } else if (referencedType instanceof StringTypeDefinition) {
- constraints.addPatterns(((StringTypeDefinition) referencedType).getPatterns());
- constraints.addLengths(((StringTypeDefinition) referencedType).getLengthStatements());
- } else if (referencedType instanceof BinaryTypeDefinition) {
- constraints.addLengths(((BinaryTypeDefinition) referencedType).getLengthConstraints());
+ final ModuleBuilder dependentModule = findDependentModuleBuilder(modules, module, prefix,
+ augmentBuilder.getLine());
+ processAugmentation(augmentBuilder, path, module, qname, dependentModule);
+ }
+
+ }
}
}
/**
- * Go through all augment definitions and resolve them. This method also
- * finds augment target node and add child nodes to it.
+ * Go through all augment definitions and resolve them. This method works in
+ * the same way as {@link #resolveAugments(Map)} except that if the target
+ * node is not found in the loaded modules, it searches for the target node
+ * in the given context.
*
* @param modules
- * all available modules
+ * all loaded modules
+ * @param context
+ * SchemaContext containing already resolved modules
*/
- private void resolveAugments(final Map<String, TreeMap<Date, ModuleBuilder>> modules) {
+ private void resolveAugmentsWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final SchemaContext context) {
final List<ModuleBuilder> allModulesList = new ArrayList<ModuleBuilder>();
final Set<ModuleBuilder> allModulesSet = new HashSet<ModuleBuilder>();
for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules.entrySet()) {
for (int i = 0; i < allModulesList.size(); i++) {
final ModuleBuilder module = allModulesList.get(i);
// try to resolve augments in module
- resolveAugment(modules, module);
+ resolveAugmentWithContext(modules, module, context);
// while all augments are not resolved
final Iterator<ModuleBuilder> allModulesIterator = allModulesSet.iterator();
while (!(module.getAugmentsResolved() == module.getAugments().size())) {
// try resolve other module augments
try {
nextModule = allModulesIterator.next();
- resolveAugment(modules, nextModule);
+ resolveAugmentWithContext(modules, nextModule, context);
} catch (NoSuchElementException e) {
throw new YangParseException("Failed to resolve augments in module '" + module.getName() + "'.", e);
}
// then try to resolve first module again
- resolveAugment(modules, module);
+ resolveAugmentWithContext(modules, module, context);
}
}
}
/**
+ * Tries to resolve augments in the given module. If the augment target node
+ * is not found, nothing is done.
*
* @param modules
* all available modules
* @param module
* current module
*/
- private void resolveAugment(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
+ private void resolveAugmentWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, final SchemaContext context) {
if (module.getAugmentsResolved() < module.getAugments().size()) {
+
for (AugmentationSchemaBuilder augmentBuilder : module.getAugments()) {
+ final int line = augmentBuilder.getLine();
if (!augmentBuilder.isResolved()) {
- final SchemaPath augmentTargetSchemaPath = augmentBuilder.getTargetPath();
- final List<QName> path = augmentTargetSchemaPath.getPath();
-
+ final List<QName> path = augmentBuilder.getTargetPath().getPath();
final QName qname = path.get(0);
String prefix = qname.getPrefix();
if (prefix == null) {
prefix = module.getPrefix();
}
- DataSchemaNodeBuilder currentParent = null;
- final ModuleBuilder dependentModule = findDependentModule(modules, module, prefix,
- augmentBuilder.getLine());
- for (DataSchemaNodeBuilder child : dependentModule.getChildNodes()) {
- final QName childQName = child.getQName();
- if (childQName.getLocalName().equals(qname.getLocalName())) {
- currentParent = child;
- break;
- }
- }
-
- if (currentParent == null) {
+ // try to find augment target module in loaded modules...
+ final ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, module, prefix,
+ line);
+ if (dependentModuleBuilder == null) {
+ // perform augmentation on module from context and
+ // continue to next augment
+ processAugmentationOnContext(augmentBuilder, path, module, prefix, line, context);
continue;
- }
-
- for (int i = 1; i < path.size(); i++) {
- final QName currentQName = path.get(i);
- DataSchemaNodeBuilder newParent = null;
- for (DataSchemaNodeBuilder child : ((DataNodeContainerBuilder) currentParent).getChildNodes()) {
- final QName childQName = child.getQName();
- if (childQName.getLocalName().equals(currentQName.getLocalName())) {
- newParent = child;
- break;
- }
- }
- if (newParent == null) {
- break; // node not found, quit search
- } else {
- currentParent = newParent;
- }
- }
-
- final QName currentQName = currentParent.getQName();
- final QName lastAugmentPathElement = path.get(path.size() - 1);
- if (currentQName.getLocalName().equals(lastAugmentPathElement.getLocalName())) {
-
- if (currentParent instanceof ChoiceBuilder) {
- ParserUtils.fillAugmentTarget(augmentBuilder, (ChoiceBuilder) currentParent);
- } else {
- ParserUtils.fillAugmentTarget(augmentBuilder, (DataNodeContainerBuilder) currentParent);
- }
- ((AugmentationTargetBuilder) currentParent).addAugmentation(augmentBuilder);
- SchemaPath oldPath = currentParent.getPath();
- augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
- augmentBuilder.setResolved(true);
- module.augmentResolved();
+ } else {
+ processAugmentation(augmentBuilder, path, module, qname, dependentModuleBuilder);
}
}
baseIdentityPrefix = module.getPrefix();
baseIdentityLocalName = baseIdentityName;
}
- final ModuleBuilder dependentModule = findDependentModule(modules, module, baseIdentityPrefix,
+ final ModuleBuilder dependentModule = findDependentModuleBuilder(modules, module, baseIdentityPrefix,
identity.getLine());
final Set<IdentitySchemaNodeBuilder> dependentModuleIdentities = dependentModule.getIdentities();
}
}
+ /**
+ * Go through identity statements defined in the current module and resolve
+ * their 'base' statement. The method tries to find the base identity in the
+ * given modules; if it is not found there, it searches the given context.
+ *
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @param context
+ * SchemaContext containing already resolved modules
+ */
+ private void resolveIdentitiesWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, SchemaContext context) {
+ final Set<IdentitySchemaNodeBuilder> identities = module.getIdentities();
+ for (IdentitySchemaNodeBuilder identity : identities) {
+ final String baseIdentityName = identity.getBaseIdentityName();
+ if (baseIdentityName != null) {
+ String baseIdentityPrefix = null;
+ String baseIdentityLocalName = null;
+ if (baseIdentityName.contains(":")) {
+ final String[] splitted = baseIdentityName.split(":");
+ baseIdentityPrefix = splitted[0];
+ baseIdentityLocalName = splitted[1];
+ } else {
+ baseIdentityPrefix = module.getPrefix();
+ baseIdentityLocalName = baseIdentityName;
+ }
+ final ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, module,
+ baseIdentityPrefix, identity.getLine());
+
+ if (dependentModuleBuilder == null) {
+ final Module dependentModule = findModuleFromContext(context, module, baseIdentityPrefix,
+ identity.getLine());
+ final Set<IdentitySchemaNode> dependentModuleIdentities = dependentModule.getIdentities();
+ for (IdentitySchemaNode idNode : dependentModuleIdentities) {
+ if (idNode.getQName().getLocalName().equals(baseIdentityLocalName)) {
+ identity.setBaseIdentity(idNode);
+ }
+ }
+ } else {
+ final Set<IdentitySchemaNodeBuilder> dependentModuleIdentities = dependentModuleBuilder
+ .getIdentities();
+ for (IdentitySchemaNodeBuilder idBuilder : dependentModuleIdentities) {
+ if (idBuilder.getQName().getLocalName().equals(baseIdentityLocalName)) {
+ identity.setBaseIdentity(idBuilder);
+ }
+ }
+ }
+ }
+ }
+ }
+
/**
* Go through uses statements defined in current module and resolve their
* refine statements.
* @param module
* module being resolved
*/
- private void resolveUsesRefines(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
+ private void resolveUsesRefine(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
final Map<List<String>, UsesNodeBuilder> moduleUses = module.getUsesNodes();
for (Map.Entry<List<String>, UsesNodeBuilder> entry : moduleUses.entrySet()) {
final UsesNodeBuilder usesNode = entry.getValue();
final int line = usesNode.getLine();
-
- GroupingBuilder targetGrouping = getTargetGrouping(usesNode, modules, module);
+ final GroupingBuilder targetGrouping = getTargetGroupingFromModules(usesNode, modules, module);
usesNode.setGroupingPath(targetGrouping.getPath());
-
for (RefineHolder refine : usesNode.getRefines()) {
- SchemaNodeBuilder refineTarget = getRefineNodeBuilderCopy(targetGrouping, refine, modules, module);
- ParserUtils.checkRefine(refineTarget, refine);
- ParserUtils.refineDefault(refineTarget, refine, line);
- if (refineTarget instanceof LeafSchemaNodeBuilder) {
- final LeafSchemaNodeBuilder leaf = (LeafSchemaNodeBuilder) refineTarget;
- ParserUtils.refineLeaf(leaf, refine, line);
- usesNode.addRefineNode(leaf);
- } else if (refineTarget instanceof ContainerSchemaNodeBuilder) {
- final ContainerSchemaNodeBuilder container = (ContainerSchemaNodeBuilder) refineTarget;
- ParserUtils.refineContainer(container, refine, line);
- usesNode.addRefineNode(container);
- } else if (refineTarget instanceof ListSchemaNodeBuilder) {
- final ListSchemaNodeBuilder list = (ListSchemaNodeBuilder) refineTarget;
- ParserUtils.refineList(list, refine, line);
- usesNode.addRefineNode(list);
- } else if (refineTarget instanceof LeafListSchemaNodeBuilder) {
- final LeafListSchemaNodeBuilder leafList = (LeafListSchemaNodeBuilder) refineTarget;
- ParserUtils.refineLeafList(leafList, refine, line);
- usesNode.addRefineNode(leafList);
- } else if (refineTarget instanceof ChoiceBuilder) {
- final ChoiceBuilder choice = (ChoiceBuilder) refineTarget;
- ParserUtils.refineChoice(choice, refine, line);
- usesNode.addRefineNode(choice);
- } else if (refineTarget instanceof AnyXmlBuilder) {
- final AnyXmlBuilder anyXml = (AnyXmlBuilder) refineTarget;
- ParserUtils.refineAnyxml(anyXml, refine, line);
- usesNode.addRefineNode(anyXml);
- } else if (refineTarget instanceof GroupingBuilder) {
- usesNode.addRefineNode(refineTarget);
- } else if (refineTarget instanceof TypeDefinitionBuilder) {
- usesNode.addRefineNode(refineTarget);
+ final SchemaNodeBuilder nodeToRefine = RefineUtils.getRefineNodeFromGroupingBuilder(targetGrouping,
+ refine, module.getName());
+ RefineUtils.performRefine(nodeToRefine, refine, line);
+ usesNode.addRefineNode(nodeToRefine);
+ }
+ }
+ }
+
+ /**
+ * Tries to find the target grouping in the given modules and resolve refine
+ * nodes. If the grouping is not found in the modules, the method tries to
+ * find it in the modules from the context.
+ *
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @param context
+ * SchemaContext containing already resolved modules
+ */
+ private void resolveUsesRefineWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, SchemaContext context) {
+ final Map<List<String>, UsesNodeBuilder> moduleUses = module.getUsesNodes();
+ for (Map.Entry<List<String>, UsesNodeBuilder> entry : moduleUses.entrySet()) {
+ final UsesNodeBuilder usesNode = entry.getValue();
+ final int line = usesNode.getLine();
+
+ final GroupingBuilder targetGroupingBuilder = getTargetGroupingFromModules(usesNode, modules, module);
+ if (targetGroupingBuilder == null) {
+ final GroupingDefinition targetGrouping = getTargetGroupingFromContext(usesNode, module, context);
+ usesNode.setGroupingPath(targetGrouping.getPath());
+ for (RefineHolder refine : usesNode.getRefines()) {
+ final SchemaNodeBuilder nodeToRefine = RefineUtils.getRefineNodeFromGroupingDefinition(
+ targetGrouping, refine, module.getName());
+ RefineUtils.performRefine(nodeToRefine, refine, line);
+ usesNode.addRefineNode(nodeToRefine);
+ }
+ } else {
+ usesNode.setGroupingPath(targetGroupingBuilder.getPath());
+ for (RefineHolder refine : usesNode.getRefines()) {
+ final SchemaNodeBuilder nodeToRefine = RefineUtils.getRefineNodeFromGroupingBuilder(
+ targetGroupingBuilder, refine, module.getName());
+ RefineUtils.performRefine(nodeToRefine, refine, line);
+ usesNode.addRefineNode(nodeToRefine);
}
}
}
}
- private GroupingBuilder getTargetGrouping(final UsesNodeBuilder usesBuilder,
+ /**
+ * Search given modules for grouping by name defined in uses node.
+ *
+ * @param usesBuilder
+ * builder of uses statement
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @return grouping with given name if found, null otherwise
+ */
+ private GroupingBuilder getTargetGroupingFromModules(final UsesNodeBuilder usesBuilder,
final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
final int line = usesBuilder.getLine();
- String groupingString = usesBuilder.getGroupingName();
+ final String groupingString = usesBuilder.getGroupingName();
String groupingPrefix;
String groupingName;
if (groupingPrefix.equals(module.getPrefix())) {
dependentModule = module;
} else {
- dependentModule = findDependentModule(modules, module, groupingPrefix, line);
+ dependentModule = findDependentModuleBuilder(modules, module, groupingPrefix, line);
+ }
+
+ if (dependentModule == null) {
+ return null;
}
List<QName> path = usesBuilder.getPath().getPath();
GroupingBuilder result = null;
Set<GroupingBuilder> groupings = dependentModule.getModuleGroupings();
- result = findGrouping(groupings, groupingName);
+ result = findGroupingBuilder(groupings, groupingName);
if (result == null) {
Builder currentNode = null;
groupings = Collections.emptySet();
}
- result = findGrouping(groupings, groupingName);
+ result = findGroupingBuilder(groupings, groupingName);
if (result != null) {
break;
}
}
}
- if (result != null) {
- return result;
- }
- throw new YangParseException(module.getName(), line, "Referenced grouping '" + groupingName + "' not found.");
- }
-
- private GroupingBuilder findGrouping(Set<GroupingBuilder> groupings, String name) {
- for (GroupingBuilder grouping : groupings) {
- if (grouping.getQName().getLocalName().equals(name)) {
- return grouping;
- }
- }
- return null;
+ return result;
}
/**
- * Find original builder of node to refine and return copy of this builder.
- * <p>
- * We must create and use a copy of builder to preserve original builder
- * state, because this object will be refined (modified) and later added to
- * {@link UsesNodeBuilder}.
- * </p>
+ * Search context for grouping by name defined in uses node.
*
- * @param groupingPath
- * path to grouping which contains node to refine
- * @param refine
- * refine object containing informations about refine
- * @param modules
- * all loaded modules
+ * @param usesBuilder
+ * builder of uses statement
* @param module
* current module
- * @return copy of node to be refined if it is present in grouping, null
- * otherwise
+ * @param context
+ * SchemaContext containing already resolved modules
+ * @return grouping with given name if found, null otherwise
*/
- private SchemaNodeBuilder getRefineNodeBuilderCopy(final GroupingBuilder targetGrouping, final RefineHolder refine,
- final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
- Builder result = null;
- final Builder lookedUpBuilder = findRefineTargetBuilder(targetGrouping, refine, modules, module);
- if (lookedUpBuilder instanceof LeafSchemaNodeBuilder) {
- result = ParserUtils.copyLeafBuilder((LeafSchemaNodeBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof ContainerSchemaNodeBuilder) {
- result = ParserUtils.copyContainerBuilder((ContainerSchemaNodeBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof ListSchemaNodeBuilder) {
- result = ParserUtils.copyListBuilder((ListSchemaNodeBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof LeafListSchemaNodeBuilder) {
- result = ParserUtils.copyLeafListBuilder((LeafListSchemaNodeBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof ChoiceBuilder) {
- result = ParserUtils.copyChoiceBuilder((ChoiceBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof AnyXmlBuilder) {
- result = ParserUtils.copyAnyXmlBuilder((AnyXmlBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof GroupingBuilder) {
- result = ParserUtils.copyGroupingBuilder((GroupingBuilder) lookedUpBuilder);
- } else if (lookedUpBuilder instanceof TypeDefinitionBuilder) {
- result = ParserUtils.copyTypedefBuilder((TypeDefinitionBuilderImpl) lookedUpBuilder);
- } else {
- throw new YangParseException(module.getName(), refine.getLine(), "Target '" + refine.getName()
- + "' can not be refined");
- }
- return (SchemaNodeBuilder) result;
- }
+ private GroupingDefinition getTargetGroupingFromContext(final UsesNodeBuilder usesBuilder,
+ final ModuleBuilder module, SchemaContext context) {
+ final int line = usesBuilder.getLine();
+ String groupingString = usesBuilder.getGroupingName();
+ String groupingPrefix;
+ String groupingName;
- /**
- * Find builder of refine node.
- *
- * @param groupingPath
- * path to grouping which contains node to refine
- * @param refine
- * object containing refine information
- * @param modules
- * all loaded modules
- * @param module
- * current module
- * @return Builder object of refine node if it is present in grouping, null
- * otherwise
- */
- private Builder findRefineTargetBuilder(final GroupingBuilder builder, final RefineHolder refine,
- final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module) {
- final String refineNodeName = refine.getName();
- Builder result = builder.getChildNode(refineNodeName);
- if (result == null) {
- Set<GroupingBuilder> grps = builder.getGroupings();
- for (GroupingBuilder gr : grps) {
- if (gr.getQName().getLocalName().equals(refineNodeName)) {
- result = gr;
- break;
- }
- }
- }
- if (result == null) {
- Set<TypeDefinitionBuilder> typedefs = builder.getTypeDefinitions();
- for (TypeDefinitionBuilder typedef : typedefs) {
- if (typedef.getQName().getLocalName().equals(refineNodeName)) {
- result = typedef;
- break;
- }
+ if (groupingString.contains(":")) {
+ String[] splitted = groupingString.split(":");
+ if (splitted.length != 2 || groupingString.contains("/")) {
+ throw new YangParseException(module.getName(), line, "Invalid name of target grouping");
}
+ groupingPrefix = splitted[0];
+ groupingName = splitted[1];
+ } else {
+ groupingPrefix = module.getPrefix();
+ groupingName = groupingString;
}
- return result;
+
+ Module dependentModule = findModuleFromContext(context, module, groupingPrefix, line);
+ return findGroupingDefinition(dependentModule.getGroupings(), groupingName);
}
private QName findFullQName(final Map<String, TreeMap<Date, ModuleBuilder>> modules, final ModuleBuilder module,
}
String prefix = splittedBase[0];
String name = splittedBase[1];
- ModuleBuilder dependentModule = findDependentModule(modules, module, prefix, idref.getLine());
+ ModuleBuilder dependentModule = findDependentModuleBuilder(modules, module, prefix, idref.getLine());
result = new QName(dependentModule.getNamespace(), dependentModule.getRevision(), prefix, name);
} else {
result = new QName(module.getNamespace(), module.getRevision(), module.getPrefix(), baseString);
QName nodeType = usnb.getNodeType();
if (nodeType.getNamespace() == null || nodeType.getRevision() == null) {
try {
- ModuleBuilder dependentModule = findDependentModule(modules, module, nodeType.getPrefix(),
+ ModuleBuilder dependentModule = findDependentModuleBuilder(modules, module, nodeType.getPrefix(),
usnb.getLine());
QName newNodeType = new QName(dependentModule.getNamespace(), dependentModule.getRevision(),
nodeType.getPrefix(), nodeType.getLocalName());
}
}
- /**
- * Find dependent module based on given prefix
- *
- * @param modules
- * all available modules
- * @param module
- * current module
- * @param prefix
- * target module prefix
- * @param line
- * current line in yang model
- * @return
- */
- private ModuleBuilder findDependentModule(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
- final ModuleBuilder module, final String prefix, final int line) {
- ModuleBuilder dependentModule = null;
- Date dependentModuleRevision = null;
-
- if (prefix.equals(module.getPrefix())) {
- dependentModule = module;
- } else {
- final ModuleImport dependentModuleImport = ParserUtils.getModuleImport(module, prefix);
- if (dependentModuleImport == null) {
- throw new YangParseException(module.getName(), line, "No import found with prefix '" + prefix + "'.");
- }
- final String dependentModuleName = dependentModuleImport.getModuleName();
- dependentModuleRevision = dependentModuleImport.getRevision();
+ private void resolveUnknownNodesWithContext(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, SchemaContext context) {
+ for (UnknownSchemaNodeBuilder unknownNodeBuilder : module.getUnknownNodes()) {
+ QName nodeType = unknownNodeBuilder.getNodeType();
+ if (nodeType.getNamespace() == null || nodeType.getRevision() == null) {
+ try {
+ ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, module,
+ nodeType.getPrefix(), unknownNodeBuilder.getLine());
+
+ QName newNodeType = null;
+ if (dependentModuleBuilder == null) {
+ Module dependentModule = findModuleFromContext(context, module, nodeType.getPrefix(),
+ unknownNodeBuilder.getLine());
+ newNodeType = new QName(dependentModule.getNamespace(), dependentModule.getRevision(),
+ nodeType.getPrefix(), nodeType.getLocalName());
+ } else {
+ newNodeType = new QName(dependentModuleBuilder.getNamespace(),
+ dependentModuleBuilder.getRevision(), nodeType.getPrefix(), nodeType.getLocalName());
+ }
- final TreeMap<Date, ModuleBuilder> moduleBuildersByRevision = modules.get(dependentModuleName);
- if (moduleBuildersByRevision == null) {
- throw new YangParseException(module.getName(), line, "Failed to find dependent module '"
- + dependentModuleName + "'.");
- }
- if (dependentModuleRevision == null) {
- dependentModule = moduleBuildersByRevision.lastEntry().getValue();
- } else {
- dependentModule = moduleBuildersByRevision.get(dependentModuleRevision);
+ unknownNodeBuilder.setNodeType(newNodeType);
+ } catch (YangParseException e) {
+ logger.debug(module.getName(), unknownNodeBuilder.getLine(), "Failed to find unknown node type: "
+ + nodeType);
+ }
}
}
-
- if (dependentModule == null) {
- throw new YangParseException(module.getName(), line, "Failed to find dependent module with prefix '"
- + prefix + "' and revision '" + dependentModuleRevision + "'.");
- }
- return dependentModule;
}
}
*/
package org.opendaylight.controller.yang.parser.util;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.opendaylight.controller.yang.model.api.Module;
import org.opendaylight.controller.yang.model.api.ModuleImport;
+import org.opendaylight.controller.yang.model.api.SchemaContext;
import org.opendaylight.controller.yang.parser.builder.impl.ModuleBuilder;
import org.opendaylight.controller.yang.parser.impl.YangParserListenerImpl;
import org.opendaylight.controller.yang.parser.util.TopologicalSort.Node;
});
}
+ public static List<ModuleBuilder> sortWithContext(SchemaContext context, ModuleBuilder... builders) {
+ List<Object> modules = new ArrayList<Object>();
+ Collections.addAll(modules, builders);
+ modules.addAll(context.getModules());
+
+ List<Node> sorted = sortInternal(modules);
+ // Cast to ModuleBuilder from Node if possible and return
+ return Lists.transform(sorted, new Function<Node, ModuleBuilder>() {
+
+ @Override
+ public ModuleBuilder apply(Node input) {
+ if(((ModuleNodeImpl) input).getReference() instanceof ModuleBuilder) {
+ return (ModuleBuilder) ((ModuleNodeImpl) input).getReference();
+ } else {
+ return null;
+ }
+ }
+ });
+ }
+
/**
* Topological sort of module dependency graph.
*
*/
package org.opendaylight.controller.yang.parser.util;
-import java.lang.reflect.Method;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
import java.util.List;
+import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.AnyXmlSchemaNode;
+import org.opendaylight.controller.yang.model.api.ChoiceCaseNode;
+import org.opendaylight.controller.yang.model.api.ChoiceNode;
+import org.opendaylight.controller.yang.model.api.ConstraintDefinition;
+import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.controller.yang.model.api.DataNodeContainer;
+import org.opendaylight.controller.yang.model.api.DataSchemaNode;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
+import org.opendaylight.controller.yang.model.api.LeafListSchemaNode;
+import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
+import org.opendaylight.controller.yang.model.api.ListSchemaNode;
+import org.opendaylight.controller.yang.model.api.Module;
import org.opendaylight.controller.yang.model.api.ModuleImport;
import org.opendaylight.controller.yang.model.api.MustDefinition;
+import org.opendaylight.controller.yang.model.api.NotificationDefinition;
+import org.opendaylight.controller.yang.model.api.RevisionAwareXPath;
+import org.opendaylight.controller.yang.model.api.SchemaContext;
+import org.opendaylight.controller.yang.model.api.SchemaNode;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.type.BinaryTypeDefinition;
import org.opendaylight.controller.yang.model.util.Int8;
import org.opendaylight.controller.yang.model.util.Leafref;
import org.opendaylight.controller.yang.model.util.StringType;
+import org.opendaylight.controller.yang.model.util.Uint16;
+import org.opendaylight.controller.yang.model.util.Uint32;
+import org.opendaylight.controller.yang.model.util.Uint64;
+import org.opendaylight.controller.yang.model.util.Uint8;
import org.opendaylight.controller.yang.model.util.UnionType;
+import org.opendaylight.controller.yang.model.util.UnknownType;
import org.opendaylight.controller.yang.parser.builder.api.AugmentationSchemaBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.AugmentationTargetBuilder;
import org.opendaylight.controller.yang.parser.builder.api.Builder;
import org.opendaylight.controller.yang.parser.builder.api.DataNodeContainerBuilder;
import org.opendaylight.controller.yang.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.api.UsesNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.AnyXmlBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.ChoiceBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ChoiceBuilder.ChoiceNodeImpl;
import org.opendaylight.controller.yang.parser.builder.impl.ChoiceCaseBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ChoiceCaseBuilder.ChoiceCaseNodeImpl;
import org.opendaylight.controller.yang.parser.builder.impl.ConstraintsBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.ContainerSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ContainerSchemaNodeBuilder.ContainerSchemaNodeImpl;
import org.opendaylight.controller.yang.parser.builder.impl.GroupingBuilderImpl;
import org.opendaylight.controller.yang.parser.builder.impl.LeafListSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.LeafSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.ListSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ListSchemaNodeBuilder.ListSchemaNodeImpl;
import org.opendaylight.controller.yang.parser.builder.impl.ModuleBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.NotificationBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.NotificationBuilder.NotificationDefinitionImpl;
+import org.opendaylight.controller.yang.parser.builder.impl.RpcDefinitionBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.TypeDefinitionBuilderImpl;
+import org.opendaylight.controller.yang.parser.builder.impl.UnionTypeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.UnknownSchemaNodeBuilder;
import org.opendaylight.controller.yang.parser.builder.impl.UsesNodeBuilderImpl;
/**
* Get module import referenced by given prefix.
- *
+ *
* @param builder
* module to search
* @param prefix
* prefix associated with import
* @return ModuleImport based on given prefix
*/
- public static ModuleImport getModuleImport(final ModuleBuilder builder,
- final String prefix) {
+ public static ModuleImport getModuleImport(final ModuleBuilder builder, final String prefix) {
ModuleImport moduleImport = null;
for (ModuleImport mi : builder.getModuleImports()) {
if (mi.getPrefix().equals(prefix)) {
+ // NOTE(review): 'moduleImport' is still null at this point, so a
+ // matching prefix returns null instead of the matching import 'mi'.
+ // This looks like it should be 'return mi;' — confirm with callers.
return moduleImport;
}
+ /**
+ * Find dependent module based on given prefix
+ *
+ * @param modules
+ * all available modules
+ * @param module
+ * current module
+ * @param prefix
+ * target module prefix
+ * @param line
+ * current line in yang model
+ * @return builder of the dependent module; the current module itself when
+ * the prefix equals the current module's prefix; null when no builder
+ * is registered for the imported module name or requested revision
+ */
+ public static ModuleBuilder findDependentModuleBuilder(final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, final String prefix, final int line) {
+ ModuleBuilder dependentModule = null;
+ Date dependentModuleRevision = null;
+
+ if (prefix.equals(module.getPrefix())) {
+ dependentModule = module;
+ } else {
+ final ModuleImport dependentModuleImport = getModuleImport(module, prefix);
+ if (dependentModuleImport == null) {
+ throw new YangParseException(module.getName(), line, "No import found with prefix '" + prefix + "'.");
+ }
+ final String dependentModuleName = dependentModuleImport.getModuleName();
+ dependentModuleRevision = dependentModuleImport.getRevision();
+
+ final TreeMap<Date, ModuleBuilder> moduleBuildersByRevision = modules.get(dependentModuleName);
+ if (moduleBuildersByRevision == null) {
+ return null;
+ }
+ if (dependentModuleRevision == null) {
+ // import carries no revision: use the latest available revision
+ dependentModule = moduleBuildersByRevision.lastEntry().getValue();
+ } else {
+ dependentModule = moduleBuildersByRevision.get(dependentModuleRevision);
+ }
+ }
+ return dependentModule;
+ }
+
+ /**
+ * Find module from context based on prefix.
+ *
+ * @param context
+ * schema context
+ * @param currentModule
+ * current module
+ * @param prefix
+ * current prefix used to reference dependent module
+ * @param line
+ * current line in yang model
+ * @return module based on given prefix if found in context, null otherwise
+ */
+ public static Module findModuleFromContext(final SchemaContext context, final ModuleBuilder currentModule,
+ final String prefix, final int line) {
+ TreeMap<Date, Module> modulesByRevision = new TreeMap<Date, Module>();
+
+ final ModuleImport dependentModuleImport = ParserUtils.getModuleImport(currentModule, prefix);
+ if (dependentModuleImport == null) {
+ throw new YangParseException(currentModule.getName(), line, "No import found with prefix '" + prefix + "'.");
+ }
+ final String dependentModuleName = dependentModuleImport.getModuleName();
+ final Date dependentModuleRevision = dependentModuleImport.getRevision();
+
+ // collect the first context module matching the imported name; a module
+ // without a revision is keyed by the epoch so the map stays consistent
+ for (Module contextModule : context.getModules()) {
+ if (contextModule.getName().equals(dependentModuleName)) {
+ Date revision = contextModule.getRevision();
+ if (revision == null) {
+ revision = new Date(0L);
+ }
+ modulesByRevision.put(revision, contextModule);
+ break;
+ }
+ }
+
+ // FIX: the previous code called modulesByRevision.firstKey() without an
+ // emptiness guard, which throws NoSuchElementException when the module
+ // is absent from the context instead of returning null as documented
+ Module result = null;
+ if (!modulesByRevision.isEmpty()) {
+ if (dependentModuleRevision == null) {
+ result = modulesByRevision.firstEntry().getValue();
+ } else {
+ result = modulesByRevision.get(dependentModuleRevision);
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Find grouping by name.
+ *
+ * @param groupings
+ * collection of grouping builders to search
+ * @param name
+ * name of grouping
+ * @return grouping with given name if present in collection, null otherwise
+ */
+ public static GroupingBuilder findGroupingBuilder(Set<GroupingBuilder> groupings, String name) {
+ for (GroupingBuilder grouping : groupings) {
+ if (grouping.getQName().getLocalName().equals(name)) {
+ return grouping;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Find grouping definition by name (compared against the QName local name).
+ *
+ * @param groupings
+ * collection of grouping definitions to search
+ * @param name
+ * name of grouping
+ * @return grouping with given name if present in collection, null otherwise
+ */
+ public static GroupingDefinition findGroupingDefinition(Set<GroupingDefinition> groupings, String name) {
+ for (GroupingDefinition grouping : groupings) {
+ if (grouping.getQName().getLocalName().equals(name)) {
+ return grouping;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Search typedef builders for one whose QName local name equals the given name.
+ *
+ * @param types
+ * types to search
+ * @param name
+ * name of type
+ * @return type with given name if present in collection, null otherwise
+ */
+ public static TypeDefinitionBuilder findTypedefBuilderByName(Set<TypeDefinitionBuilder> types, String name) {
+ for (TypeDefinitionBuilder td : types) {
+ if (td.getQName().getLocalName().equals(name)) {
+ return td;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Find type by name (compared against the QName local name).
+ *
+ * @param types
+ * collection of types
+ * @param typeName
+ * type name
+ * @return type with given name if it is present in collection, null
+ * otherwise
+ */
+ public static TypeDefinition<?> findTypeByName(Set<TypeDefinition<?>> types, String typeName) {
+ for (TypeDefinition<?> type : types) {
+ if (type.getQName().getLocalName().equals(typeName)) {
+ return type;
+ }
+ }
+ return null;
+ }
+
/**
* Parse uses path.
- *
+ *
* @param usesPath
* as String
* @return SchemaPath from given String
if (splittedElement.length == 1) {
name = new QName(null, null, null, splittedElement[0]);
} else {
- name = new QName(null, null, splittedElement[0],
- splittedElement[1]);
+ name = new QName(null, null, splittedElement[0], splittedElement[1]);
}
path.add(name);
}
return new SchemaPath(path, absolute);
}
+ /**
+ * Pull restrictions from type and add them to constraints.
+ *
+ * @param type
+ * type to read restrictions from: ranges and fraction digits for
+ * decimal types, ranges for integer types, patterns and lengths
+ * for string types, lengths for binary types; any other type
+ * contributes nothing
+ * @param constraints
+ * constraints holder the restrictions are added to
+ */
+ public static void mergeConstraints(final TypeDefinition<?> type, final TypeConstraints constraints) {
+ if (type instanceof DecimalTypeDefinition) {
+ constraints.addRanges(((DecimalTypeDefinition) type).getRangeStatements());
+ constraints.addFractionDigits(((DecimalTypeDefinition) type).getFractionDigits());
+ } else if (type instanceof IntegerTypeDefinition) {
+ constraints.addRanges(((IntegerTypeDefinition) type).getRangeStatements());
+ } else if (type instanceof StringTypeDefinition) {
+ constraints.addPatterns(((StringTypeDefinition) type).getPatterns());
+ constraints.addLengths(((StringTypeDefinition) type).getLengthStatements());
+ } else if (type instanceof BinaryTypeDefinition) {
+ constraints.addLengths(((BinaryTypeDefinition) type).getLengthConstraints());
+ }
+ }
+
+ /**
+ * Find node in grouping by name.
+ *
+ * Search order: child nodes first, then groupings, then typedefs.
+ *
+ * @param grouping
+ * grouping to search
+ * @param refineNodeName
+ * name of node
+ * @return builder of node with given name if present in grouping, null
+ * otherwise
+ */
+ public static Builder findRefineTargetBuilder(final GroupingBuilder grouping, final String refineNodeName) {
+ // search child nodes
+ Builder result = grouping.getChildNode(refineNodeName);
+ // search groupings
+ if (result == null) {
+ Set<GroupingBuilder> grps = grouping.getGroupings();
+ for (GroupingBuilder gr : grps) {
+ if (gr.getQName().getLocalName().equals(refineNodeName)) {
+ result = gr;
+ break;
+ }
+ }
+ }
+ // search typedefs
+ if (result == null) {
+ Set<TypeDefinitionBuilder> typedefs = grouping.getTypeDefinitions();
+ for (TypeDefinitionBuilder typedef : typedefs) {
+ if (typedef.getQName().getLocalName().equals(refineNodeName)) {
+ result = typedef;
+ break;
+ }
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Find node in grouping by name.
+ *
+ * Search order: child nodes first, then groupings, then typedefs.
+ *
+ * @param builder
+ * grouping to search
+ * @param refineNodeName
+ * name of node
+ * @return node with given name if present in grouping, null otherwise
+ */
+ public static Object findRefineTargetNode(final GroupingDefinition builder, final String refineNodeName) {
+ // search child nodes
+ Object result = builder.getDataChildByName(refineNodeName);
+ // search groupings
+ if (result == null) {
+ Set<GroupingDefinition> grps = builder.getGroupings();
+ for (GroupingDefinition gr : grps) {
+ if (gr.getQName().getLocalName().equals(refineNodeName)) {
+ result = gr;
+ break;
+ }
+ }
+ }
+ // search typedefs
+ if (result == null) {
+ Set<TypeDefinition<?>> typedefs = builder.getTypeDefinitions();
+ for (TypeDefinition<?> typedef : typedefs) {
+ if (typedef.getQName().getLocalName().equals(refineNodeName)) {
+ result = typedef;
+ break;
+ }
+ }
+ }
+ return result;
+ }
+
/**
* Add all augment's child nodes to given target.
- *
+ *
* @param augment
+ * builder of augment statement
* @param target
+ * augmentation target node
*/
- public static void fillAugmentTarget(
- final AugmentationSchemaBuilder augment,
- final DataNodeContainerBuilder target) {
+ public static void fillAugmentTarget(final AugmentationSchemaBuilder augment, final DataNodeContainerBuilder target) {
+ // NOTE(review): children are marked augmenting and re-pathed under the
+ // target, but nothing is added to 'target' here despite the javadoc —
+ // confirm the actual insertion happens elsewhere
for (DataSchemaNodeBuilder builder : augment.getChildNodes()) {
builder.setAugmenting(true);
correctAugmentChildPath(builder, target.getPath());
}
}
- public static void fillAugmentTarget(
- final AugmentationSchemaBuilder augment, final ChoiceBuilder target) {
+ /**
+ * Add all augment's child nodes to given target.
+ *
+ * @param augment
+ * builder of augment statement
+ * @param target
+ * augmentation target choice node
+ */
+ public static void fillAugmentTarget(final AugmentationSchemaBuilder augment, final ChoiceBuilder target) {
+ // NOTE(review): as with the container overload, 'target' itself is not
+ // modified here — only the children's augmenting flag and paths change
for (DataSchemaNodeBuilder builder : augment.getChildNodes()) {
builder.setAugmenting(true);
correctAugmentChildPath(builder, target.getPath());
}
}
- private static void correctAugmentChildPath(
- final DataSchemaNodeBuilder childNode,
- final SchemaPath parentSchemaPath) {
-
+ /**
+ * Rebase the child node's schema path under the given parent path and
+ * recursively fix the paths of nested children (for container-like nodes).
+ *
+ * @param childNode
+ * augment child whose path is rewritten in place
+ * @param parentSchemaPath
+ * schema path of the augmentation target
+ */
+ private static void correctAugmentChildPath(final DataSchemaNodeBuilder childNode, final SchemaPath parentSchemaPath) {
// set correct path
- List<QName> targetNodePath = new ArrayList<QName>(
- parentSchemaPath.getPath());
+ List<QName> targetNodePath = new ArrayList<QName>(parentSchemaPath.getPath());
targetNodePath.add(childNode.getQName());
+ // NOTE(review): path is always marked absolute (true) here — confirm
+ // that augmentation target paths are always absolute
childNode.setPath(new SchemaPath(targetNodePath, true));
// set correct path for all child nodes
if (childNode instanceof DataNodeContainerBuilder) {
DataNodeContainerBuilder dataNodeContainer = (DataNodeContainerBuilder) childNode;
- for (DataSchemaNodeBuilder child : dataNodeContainer
- .getChildNodes()) {
+ for (DataSchemaNodeBuilder child : dataNodeContainer.getChildNodes()) {
correctAugmentChildPath(child, childNode.getPath());
}
}
/**
* Repair schema path of node type.
- *
+ *
* @param node
* node which contains type statement
* @param parentSchemaPath
* schema path of parent node
*/
- private static void correctTypeAwareNodePath(
- TypeAwareBuilder node, SchemaPath parentSchemaPath) {
+ private static void correctTypeAwareNodePath(final TypeAwareBuilder node, final SchemaPath parentSchemaPath) {
final QName nodeBuilderQName = node.getQName();
final TypeDefinition<?> nodeType = node.getType();
if (nodeType != null) {
if (nodeType instanceof ExtendedType) {
ExtendedType et = (ExtendedType) nodeType;
- if (nodeType
- .getQName()
- .getLocalName()
- .equals(nodeType.getBaseType().getQName()
- .getLocalName())) {
+ if (nodeType.getQName().getLocalName().equals(nodeType.getBaseType().getQName().getLocalName())) {
fd = et.getFractionDigits();
lengths = et.getLengths();
patterns = et.getPatterns();
}
}
}
- TypeDefinition<?> newType = createCorrectTypeDefinition(
- parentSchemaPath, nodeBuilderQName, nodeType);
+ TypeDefinition<?> newType = createCorrectTypeDefinition(parentSchemaPath, nodeBuilderQName, nodeType);
node.setType(newType);
} else {
TypeDefinitionBuilder nodeBuilderTypedef = node.getTypedef();
String tdbTypeName = nodeBuilderTypedef.getQName().getLocalName();
String baseTypeName = null;
if (nodeBuilderTypedef.getType() == null) {
- baseTypeName = nodeBuilderTypedef.getTypedef().getQName()
- .getLocalName();
+ baseTypeName = nodeBuilderTypedef.getTypedef().getQName().getLocalName();
} else {
- baseTypeName = nodeBuilderTypedef.getType().getQName()
- .getLocalName();
+ baseTypeName = nodeBuilderTypedef.getType().getQName().getLocalName();
}
if (!(tdbTypeName.equals(baseTypeName))) {
return;
return;
}
- SchemaPath newSchemaPath = createNewSchemaPath(
- nodeBuilderTypedef.getPath(), nodeBuilderQName,
+ SchemaPath newSchemaPath = createNewSchemaPath(nodeBuilderTypedef.getPath(), nodeBuilderQName,
nodeBuilderTypedef.getQName());
nodeBuilderTypedef.setPath(newSchemaPath);
}
/**
* Check if there are some constraints.
- *
+ *
* @param fd
* fraction digits
* @param lengths
* range constraints
* @return true, if any of constraints are present, false otherwise
*/
- private static boolean hasConstraints(final Integer fd,
- final List<LengthConstraint> lengths,
- final List<PatternConstraint> patterns,
- final List<RangeConstraint> ranges) {
- if (fd == null && (lengths == null || lengths.isEmpty())
- && (patterns == null || patterns.isEmpty())
+ private static boolean hasConstraints(final Integer fd, final List<LengthConstraint> lengths,
+ final List<PatternConstraint> patterns, final List<RangeConstraint> ranges) {
+ // constrained iff at least one of the four constraint kinds is present
+ if (fd == null && (lengths == null || lengths.isEmpty()) && (patterns == null || patterns.isEmpty())
&& (ranges == null || ranges.isEmpty())) {
return false;
} else {
}
+ /**
+ * Create a copy of the given type definition rebased under a new schema
+ * path built from parentSchemaPath + nodeQName + the type's own QName.
+ *
+ * @param parentSchemaPath
+ * schema path of the parent node
+ * @param nodeQName
+ * QName of the node owning the type
+ * @param nodeType
+ * type to copy; may be null, in which case null is returned
+ * @return rebased copy of nodeType, or null if nodeType is null or of an
+ * unhandled kind
+ */
- private static TypeDefinition<?> createCorrectTypeDefinition(
- SchemaPath parentSchemaPath, QName nodeQName,
+ private static TypeDefinition<?> createCorrectTypeDefinition(SchemaPath parentSchemaPath, QName nodeQName,
TypeDefinition<?> nodeType) {
TypeDefinition<?> result = null;
- SchemaPath newSchemaPath = null;
+
if (nodeType != null) {
+ // FIX: compute the new path only after the null check; the previous
+ // version called nodeType.getQName() before 'if (nodeType != null)',
+ // which NPEs on a null type and makes the null check dead code
+ QName nodeTypeQName = nodeType.getQName();
+ SchemaPath newSchemaPath = createNewSchemaPath(parentSchemaPath, nodeQName, nodeTypeQName);
if (nodeType instanceof BinaryTypeDefinition) {
BinaryTypeDefinition binType = (BinaryTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, binType.getQName());
- List<Byte> bytes = (List<Byte>) binType.getDefaultValue();
+
+ // List<Byte> bytes = (List<Byte>) binType.getDefaultValue();
+ // workaround to get rid of 'Unchecked cast' warning
+ List<Byte> bytes = new ArrayList<Byte>();
+ Object defaultValue = binType.getDefaultValue();
+ if (defaultValue instanceof List) {
+ for (Object o : List.class.cast(defaultValue)) {
+ if (o instanceof Byte) {
+ bytes.add((Byte) o);
+ }
+ }
+ }
result = new BinaryType(newSchemaPath, bytes);
} else if (nodeType instanceof BitsTypeDefinition) {
BitsTypeDefinition bitsType = (BitsTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, nodeType.getQName());
result = new BitsType(newSchemaPath, bitsType.getBits());
} else if (nodeType instanceof BooleanTypeDefinition) {
- BooleanTypeDefinition booleanType = (BooleanTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, booleanType.getQName());
result = new BooleanType(newSchemaPath);
} else if (nodeType instanceof DecimalTypeDefinition) {
DecimalTypeDefinition decimalType = (DecimalTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, decimalType.getQName());
- result = new Decimal64(newSchemaPath,
- decimalType.getFractionDigits());
+ result = new Decimal64(newSchemaPath, decimalType.getFractionDigits());
} else if (nodeType instanceof EmptyTypeDefinition) {
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, nodeType.getQName());
result = new EmptyType(newSchemaPath);
} else if (nodeType instanceof EnumTypeDefinition) {
EnumTypeDefinition enumType = (EnumTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, enumType.getQName());
- result = new EnumerationType(newSchemaPath,
- (EnumPair) enumType.getDefaultValue(),
- enumType.getValues());
+ result = new EnumerationType(newSchemaPath, (EnumPair) enumType.getDefaultValue(), enumType.getValues());
} else if (nodeType instanceof IdentityrefTypeDefinition) {
IdentityrefTypeDefinition idrefType = (IdentityrefTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, idrefType.getQName());
- result = new IdentityrefType(idrefType.getIdentity(),
- newSchemaPath);
+ result = new IdentityrefType(idrefType.getIdentity(), newSchemaPath);
} else if (nodeType instanceof InstanceIdentifierTypeDefinition) {
InstanceIdentifierTypeDefinition instIdType = (InstanceIdentifierTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, instIdType.getQName());
- return new InstanceIdentifier(newSchemaPath,
- instIdType.getPathStatement(),
+ return new InstanceIdentifier(newSchemaPath, instIdType.getPathStatement(),
instIdType.requireInstance());
} else if (nodeType instanceof StringTypeDefinition) {
- result = createNewStringType(parentSchemaPath, nodeQName,
- (StringTypeDefinition) nodeType);
+ result = createNewStringType(parentSchemaPath, nodeQName, (StringTypeDefinition) nodeType);
} else if (nodeType instanceof IntegerTypeDefinition) {
- result = createNewIntType(parentSchemaPath, nodeQName,
- (IntegerTypeDefinition) nodeType);
+ result = createNewIntType(parentSchemaPath, nodeQName, (IntegerTypeDefinition) nodeType);
} else if (nodeType instanceof UnsignedIntegerTypeDefinition) {
- result = createNewUintType(parentSchemaPath, nodeQName,
- (UnsignedIntegerTypeDefinition) nodeType);
+ result = createNewUintType(parentSchemaPath, nodeQName, (UnsignedIntegerTypeDefinition) nodeType);
} else if (nodeType instanceof LeafrefTypeDefinition) {
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, nodeType.getQName());
- result = new Leafref(newSchemaPath,
- ((LeafrefTypeDefinition) nodeType).getPathStatement());
+ result = new Leafref(newSchemaPath, ((LeafrefTypeDefinition) nodeType).getPathStatement());
} else if (nodeType instanceof UnionTypeDefinition) {
UnionTypeDefinition unionType = (UnionTypeDefinition) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, unionType.getQName());
return new UnionType(newSchemaPath, unionType.getTypes());
} else if (nodeType instanceof ExtendedType) {
ExtendedType extType = (ExtendedType) nodeType;
- newSchemaPath = createNewSchemaPath(parentSchemaPath,
- nodeQName, extType.getQName());
- result = createNewExtendedType(newSchemaPath, extType);
+ result = createNewExtendedType(extType, newSchemaPath);
}
}
return result;
}
- private static TypeDefinition<?> createNewExtendedType(
- SchemaPath newSchemaPath, ExtendedType oldExtendedType) {
- QName qname = oldExtendedType.getQName();
- TypeDefinition<?> baseType = oldExtendedType.getBaseType();
- String desc = oldExtendedType.getDescription();
- String ref = oldExtendedType.getReference();
- ExtendedType.Builder builder = new ExtendedType.Builder(qname,
- baseType, desc, ref, newSchemaPath);
- builder.status(oldExtendedType.getStatus());
- builder.lengths(oldExtendedType.getLengths());
- builder.patterns(oldExtendedType.getPatterns());
- builder.ranges(oldExtendedType.getRanges());
- builder.fractionDigits(oldExtendedType.getFractionDigits());
- builder.unknownSchemaNodes(oldExtendedType.getUnknownSchemaNodes());
+ /**
+ * Create a copy of the given ExtendedType rebased on a new schema path.
+ * Status, lengths, patterns, ranges, fraction digits and unknown schema
+ * nodes are carried over from the original.
+ *
+ * @param oldType
+ * extended type to copy
+ * @param newPath
+ * schema path for the new type
+ * @return copy of oldType with the given schema path
+ */
+ private static ExtendedType createNewExtendedType(final ExtendedType oldType, final SchemaPath newPath) {
+ QName qname = oldType.getQName();
+ TypeDefinition<?> baseType = oldType.getBaseType();
+ String desc = oldType.getDescription();
+ String ref = oldType.getReference();
+ ExtendedType.Builder builder = new ExtendedType.Builder(qname, baseType, desc, ref, newPath);
+ builder.status(oldType.getStatus());
+ builder.lengths(oldType.getLengths());
+ builder.patterns(oldType.getPatterns());
+ builder.ranges(oldType.getRanges());
+ builder.fractionDigits(oldType.getFractionDigits());
+ builder.unknownSchemaNodes(oldType.getUnknownSchemaNodes());
return builder.build();
}
- private static TypeDefinition<?> createNewStringType(SchemaPath schemaPath,
- QName nodeQName, StringTypeDefinition nodeType) {
- List<QName> path = schemaPath.getPath();
- List<QName> newPath = new ArrayList<QName>(path);
+ /**
+ * Create a StringType rebased under the given schema path and node QName.
+ *
+ * NOTE(review): the result carries no length/pattern restrictions from
+ * nodeType — only the path is rebuilt; confirm this is intended.
+ *
+ * @param schemaPath
+ * parent schema path
+ * @param nodeQName
+ * QName of the node owning the type
+ * @param nodeType
+ * original string type (used only for its QName)
+ * @return new StringType with path schemaPath + nodeQName + type QName
+ */
+ private static StringTypeDefinition createNewStringType(final SchemaPath schemaPath, final QName nodeQName,
+ final StringTypeDefinition nodeType) {
+ final List<QName> path = schemaPath.getPath();
+ final List<QName> newPath = new ArrayList<QName>(path);
newPath.add(nodeQName);
newPath.add(nodeType.getQName());
- SchemaPath newSchemaPath = new SchemaPath(newPath,
- schemaPath.isAbsolute());
-
+ final SchemaPath newSchemaPath = new SchemaPath(newPath, schemaPath.isAbsolute());
return new StringType(newSchemaPath);
}
- private static TypeDefinition<?> createNewIntType(SchemaPath schemaPath,
- QName nodeQName, IntegerTypeDefinition type) {
- QName typeQName = type.getQName();
- SchemaPath newSchemaPath = createNewSchemaPath(schemaPath, nodeQName,
- typeQName);
- String localName = typeQName.getLocalName();
+ /**
+ * Create a concrete signed integer type rebased under the given schema
+ * path, selected by the type QName's local name (e.g. "int8" → Int8).
+ *
+ * @param schemaPath
+ * parent schema path
+ * @param nodeQName
+ * QName of the node owning the type
+ * @param type
+ * original integer type
+ * @return rebased integer type
+ */
+ private static IntegerTypeDefinition createNewIntType(final SchemaPath schemaPath, final QName nodeQName,
+ final IntegerTypeDefinition type) {
+ final QName typeQName = type.getQName();
+ final SchemaPath newSchemaPath = createNewSchemaPath(schemaPath, nodeQName, typeQName);
+ final String localName = typeQName.getLocalName();
if ("int8".equals(localName)) {
return new Int8(newSchemaPath);
}
}
- private static TypeDefinition<?> createNewUintType(SchemaPath schemaPath,
- QName nodeQName, UnsignedIntegerTypeDefinition type) {
- QName typeQName = type.getQName();
- SchemaPath newSchemaPath = createNewSchemaPath(schemaPath, nodeQName,
- typeQName);
- String localName = typeQName.getLocalName();
+ /**
+ * Create a concrete unsigned integer type rebased under the given schema
+ * path: Uint8/Uint16/Uint32/Uint64, selected by the type QName's local
+ * name.
+ *
+ * @param schemaPath
+ * parent schema path
+ * @param nodeQName
+ * QName of the node owning the type
+ * @param type
+ * original unsigned integer type
+ * @return rebased Uint8/Uint16/Uint32/Uint64, or null for any other
+ * local name
+ */
+ private static UnsignedIntegerTypeDefinition createNewUintType(final SchemaPath schemaPath, final QName nodeQName,
+ final UnsignedIntegerTypeDefinition type) {
+ final QName typeQName = type.getQName();
+ final SchemaPath newSchemaPath = createNewSchemaPath(schemaPath, nodeQName, typeQName);
+ final String localName = typeQName.getLocalName();
if ("uint8".equals(localName)) {
- return new Int8(newSchemaPath);
+ return new Uint8(newSchemaPath);
} else if ("uint16".equals(localName)) {
- return new Int16(newSchemaPath);
+ return new Uint16(newSchemaPath);
} else if ("uint32".equals(localName)) {
- return new Int32(newSchemaPath);
+ return new Uint32(newSchemaPath);
} else if ("uint64".equals(localName)) {
- return new Int64(newSchemaPath);
+ return new Uint64(newSchemaPath);
} else {
return null;
}
}
- private static SchemaPath createNewSchemaPath(SchemaPath schemaPath,
- QName currentQName, QName qname) {
+ /**
+ * Build a new schema path by appending currentQName and qname to the given
+ * base path, keeping its absolute flag.
+ */
+ private static SchemaPath createNewSchemaPath(final SchemaPath schemaPath, final QName currentQName,
+ final QName qname) {
List<QName> newPath = new ArrayList<QName>(schemaPath.getPath());
newPath.add(currentQName);
newPath.add(qname);
return new SchemaPath(newPath, schemaPath.isAbsolute());
}
- public static void refineLeaf(LeafSchemaNodeBuilder leaf,
- RefineHolder refine, int line) {
- String defaultStr = refine.getDefaultStr();
- Boolean mandatory = refine.isMandatory();
- MustDefinition must = refine.getMust();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (defaultStr != null && !("".equals(defaultStr))) {
- leaf.setDefaultStr(defaultStr);
- }
- if (mandatory != null) {
- leaf.getConstraints().setMandatory(mandatory);
- }
- if (must != null) {
- leaf.getConstraints().addMustDefinition(must);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- leaf.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void refineContainer(ContainerSchemaNodeBuilder container,
- RefineHolder refine, int line) {
- Boolean presence = refine.isPresence();
- MustDefinition must = refine.getMust();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (presence != null) {
- container.setPresence(presence);
- }
- if (must != null) {
- container.getConstraints().addMustDefinition(must);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- container.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void refineList(ListSchemaNodeBuilder list,
- RefineHolder refine, int line) {
- MustDefinition must = refine.getMust();
- Integer min = refine.getMinElements();
- Integer max = refine.getMaxElements();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (must != null) {
- list.getConstraints().addMustDefinition(must);
- }
- if (min != null) {
- list.getConstraints().setMinElements(min);
- }
- if (max != null) {
- list.getConstraints().setMaxElements(max);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- list.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void refineLeafList(LeafListSchemaNodeBuilder leafList,
- RefineHolder refine, int line) {
- MustDefinition must = refine.getMust();
- Integer min = refine.getMinElements();
- Integer max = refine.getMaxElements();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (must != null) {
- leafList.getConstraints().addMustDefinition(must);
- }
- if (min != null) {
- leafList.getConstraints().setMinElements(min);
- }
- if (max != null) {
- leafList.getConstraints().setMaxElements(max);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- leafList.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void refineChoice(ChoiceBuilder choice, RefineHolder refine,
- int line) {
- String defaultStr = refine.getDefaultStr();
- Boolean mandatory = refine.isMandatory();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (defaultStr != null) {
- choice.setDefaultCase(defaultStr);
- }
- if (mandatory != null) {
- choice.getConstraints().setMandatory(mandatory);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- choice.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void refineAnyxml(AnyXmlBuilder anyXml, RefineHolder refine,
- int line) {
- Boolean mandatory = refine.isMandatory();
- MustDefinition must = refine.getMust();
- List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
-
- if (mandatory != null) {
- anyXml.getConstraints().setMandatory(mandatory);
- }
- if (must != null) {
- anyXml.getConstraints().addMustDefinition(must);
- }
- if (unknownNodes != null) {
- for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
- anyXml.addUnknownSchemaNode(unknown);
- }
- }
- }
-
- public static void checkRefine(SchemaNodeBuilder node, RefineHolder refine) {
- String name = node.getQName().getLocalName();
- int line = refine.getLine();
-
- String defaultStr = refine.getDefaultStr();
- Boolean mandatory = refine.isMandatory();
- Boolean presence = refine.isPresence();
- MustDefinition must = refine.getMust();
- Integer min = refine.getMinElements();
- Integer max = refine.getMaxElements();
-
- if (node instanceof AnyXmlBuilder) {
- checkRefineDefault(node, defaultStr, line);
- checkRefinePresence(node, presence, line);
- checkRefineMinMax(name, line, min, max);
- } else if (node instanceof ChoiceBuilder) {
- checkRefinePresence(node, presence, line);
- checkRefineMust(node, must, line);
- checkRefineMinMax(name, line, min, max);
- } else if (node instanceof ContainerSchemaNodeBuilder) {
- checkRefineDefault(node, defaultStr, line);
- checkRefineMandatory(node, mandatory, line);
- checkRefineMust(node, must, line);
- checkRefineMinMax(name, line, min, max);
- } else if (node instanceof LeafSchemaNodeBuilder) {
- checkRefinePresence(node, presence, line);
- checkRefineMinMax(name, line, min, max);
- } else if (node instanceof LeafListSchemaNodeBuilder
- || node instanceof ListSchemaNodeBuilder) {
- checkRefineDefault(node, defaultStr, line);
- checkRefinePresence(node, presence, line);
- checkRefineMandatory(node, mandatory, line);
- } else if (node instanceof GroupingBuilder
- || node instanceof TypeDefinitionBuilder
- || node instanceof UsesNodeBuilder) {
- checkRefineDefault(node, defaultStr, line);
- checkRefinePresence(node, presence, line);
- checkRefineMandatory(node, mandatory, line);
- checkRefineMust(node, must, line);
- checkRefineMinMax(name, line, min, max);
- }
- }
-
- private static void checkRefineDefault(SchemaNodeBuilder node,
- String defaultStr, int line) {
- if (defaultStr != null) {
- throw new YangParseException(line, "Can not refine 'default' for '"
- + node.getQName().getLocalName() + "'.");
- }
- }
-
- private static void checkRefineMandatory(SchemaNodeBuilder node,
- Boolean mandatory, int line) {
- if (mandatory != null) {
- throw new YangParseException(line,
- "Can not refine 'mandatory' for '"
- + node.getQName().getLocalName() + "'.");
- }
- }
-
- private static void checkRefinePresence(SchemaNodeBuilder node,
- Boolean presence, int line) {
- if (presence != null) {
- throw new YangParseException(line,
- "Can not refine 'presence' for '"
- + node.getQName().getLocalName() + "'.");
- }
- }
-
- private static void checkRefineMust(SchemaNodeBuilder node,
- MustDefinition must, int line) {
- if (must != null) {
- throw new YangParseException(line, "Can not refine 'must' for '"
- + node.getQName().getLocalName() + "'.");
- }
- }
-
- private static void checkRefineMinMax(String refineTargetName,
- int refineLine, Integer min, Integer max) {
- if (min != null || max != null) {
- throw new YangParseException(refineLine,
- "Can not refine 'min-elements' or 'max-elements' for '"
- + refineTargetName + "'.");
- }
- }
-
- /**
- * Perform refine operation of following parameters:
- * <ul>
- * <li>description</li>
- * <li>reference</li>
- * <li>config</li>
- * </ul>
- *
- * These parameters may be refined for any node.
- *
- * @param node
- * node to refine
- * @param refine
- * refine holder containing values to refine
- * @param line
- * current line in yang model
- */
- public static void refineDefault(Builder node, RefineHolder refine, int line) {
- Class<? extends Builder> cls = node.getClass();
-
- String description = refine.getDescription();
- if (description != null) {
- try {
- Method method = cls.getDeclaredMethod("setDescription",
- String.class);
- method.invoke(node, description);
- } catch (Exception e) {
- throw new YangParseException(line,
- "Cannot refine description in " + cls.getName(), e);
- }
- }
-
- String reference = refine.getReference();
- if (reference != null) {
- try {
- Method method = cls.getDeclaredMethod("setReference",
- String.class);
- method.invoke(node, reference);
- } catch (Exception e) {
- throw new YangParseException(line,
- "Cannot refine reference in " + cls.getName(), e);
- }
- }
-
- Boolean config = refine.isConfig();
- if (config != null) {
- try {
- Method method = cls.getDeclaredMethod("setConfiguration",
- Boolean.TYPE);
- method.invoke(node, config);
- } catch (Exception e) {
- throw new YangParseException(line, "Cannot refine config in "
- + cls.getName(), e);
- }
- }
- }
-
- public static LeafSchemaNodeBuilder copyLeafBuilder(
- final LeafSchemaNodeBuilder old) {
- final LeafSchemaNodeBuilder copy = new LeafSchemaNodeBuilder(
- old.getQName(), old.getLine());
+ public static LeafSchemaNodeBuilder copyLeafBuilder(final LeafSchemaNodeBuilder old) {
+ final LeafSchemaNodeBuilder copy = new LeafSchemaNodeBuilder(old.getQName(), old.getLine());
final TypeDefinition<?> type = old.getType();
-
if (type == null) {
copy.setTypedef(old.getTypedef());
} else {
copy.setType(type);
}
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
for (UnknownSchemaNodeBuilder unknown : old.getUnknownNodes()) {
copy.addUnknownSchemaNode(unknown);
}
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setAugmenting(old.isAugmenting());
- copy.setConfiguration(old.isConfiguration());
copy.setDefaultStr(old.getDefaultStr());
copy.setUnits(old.getUnits());
return copy;
}
- public static ContainerSchemaNodeBuilder copyContainerBuilder(
- final ContainerSchemaNodeBuilder old) {
- final ContainerSchemaNodeBuilder copy = new ContainerSchemaNodeBuilder(
- old.getQName(), old.getLine());
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ public static ContainerSchemaNodeBuilder copyContainerBuilder(final ContainerSchemaNodeBuilder old) {
+ final ContainerSchemaNodeBuilder copy = new ContainerSchemaNodeBuilder(old.getQName(), old.getLine());
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
for (UnknownSchemaNodeBuilder unknown : old.getUnknownNodes()) {
copy.addUnknownSchemaNode(unknown);
}
for (UsesNodeBuilder use : old.getUsesNodes()) {
copy.addUsesNode(use);
}
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setAugmenting(old.isAugmenting());
- copy.setConfiguration(old.isConfiguration());
copy.setPresence(old.isPresence());
return copy;
}
- public static ListSchemaNodeBuilder copyListBuilder(
- final ListSchemaNodeBuilder old) {
- final ListSchemaNodeBuilder copy = new ListSchemaNodeBuilder(
- old.getQName(), old.getLine());
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ public static ListSchemaNodeBuilder copyListBuilder(final ListSchemaNodeBuilder old) {
+ final ListSchemaNodeBuilder copy = new ListSchemaNodeBuilder(old.getQName(), old.getLine());
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
for (UnknownSchemaNodeBuilder unknown : old.getUnknownNodes()) {
copy.addUnknownSchemaNode(unknown);
}
for (UsesNodeBuilder use : old.getUsesNodes()) {
copy.addUsesNode(use);
}
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setAugmenting(old.isAugmenting());
- copy.setConfiguration(old.isConfiguration());
copy.setUserOrdered(old.isUserOrdered());
return copy;
}
- public static LeafListSchemaNodeBuilder copyLeafListBuilder(
- final LeafListSchemaNodeBuilder old) {
- final LeafListSchemaNodeBuilder copy = new LeafListSchemaNodeBuilder(
- old.getQName(), old.getLine());
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ public static LeafListSchemaNodeBuilder copyLeafListBuilder(final LeafListSchemaNodeBuilder old) {
+ final LeafListSchemaNodeBuilder copy = new LeafListSchemaNodeBuilder(old.getQName(), old.getLine());
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
final TypeDefinition<?> type = old.getType();
if (type == null) {
copy.setTypedef(old.getTypedef());
for (UnknownSchemaNodeBuilder unknown : old.getUnknownNodes()) {
copy.addUnknownSchemaNode(unknown);
}
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setAugmenting(old.isAugmenting());
- copy.setConfiguration(old.isConfiguration());
copy.setUserOrdered(old.isUserOrdered());
return copy;
}
public static ChoiceBuilder copyChoiceBuilder(final ChoiceBuilder old) {
- final ChoiceBuilder copy = new ChoiceBuilder(old.getQName(),
- old.getLine());
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ final ChoiceBuilder copy = new ChoiceBuilder(old.getQName(), old.getLine());
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
for (ChoiceCaseBuilder caseBuilder : old.getCases()) {
copy.addChildNode(caseBuilder);
}
copy.addUnknownSchemaNode(unknown);
}
copy.setDefaultCase(old.getDefaultCase());
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setAugmenting(old.isAugmenting());
- copy.setConfiguration(old.isConfiguration());
return copy;
}
public static AnyXmlBuilder copyAnyXmlBuilder(final AnyXmlBuilder old) {
- final AnyXmlBuilder copy = new AnyXmlBuilder(old.getQName(),
- old.getLine());
- copy.setPath(old.getPath());
- copyConstraints(old, copy);
+ final AnyXmlBuilder copy = new AnyXmlBuilder(old.getQName(), old.getLine());
+ copyDataSchemaNodeArgs(old, copy);
+ copyConstraintsFromBuilder(old, copy);
for (UnknownSchemaNodeBuilder unknown : old.getUnknownNodes()) {
copy.addUnknownSchemaNode(unknown);
}
- copy.setDescription(old.getDescription());
- copy.setReference(old.getReference());
- copy.setStatus(old.getStatus());
- copy.setConfiguration(old.isConfiguration());
return copy;
}
public static GroupingBuilder copyGroupingBuilder(final GroupingBuilder old) {
- final GroupingBuilder copy = new GroupingBuilderImpl(old.getQName(),
- old.getLine());
+ final GroupingBuilder copy = new GroupingBuilderImpl(old.getQName(), old.getLine());
copy.setPath(old.getPath());
for (DataSchemaNodeBuilder child : old.getChildNodes()) {
copy.addChildNode(child);
return copy;
}
- public static TypeDefinitionBuilderImpl copyTypedefBuilder(
- TypeDefinitionBuilderImpl old) {
- final TypeDefinitionBuilderImpl copy = new TypeDefinitionBuilderImpl(
- old.getQName(), old.getLine());
+ public static TypeDefinitionBuilderImpl copyTypedefBuilder(final TypeDefinitionBuilderImpl old) {
+ final TypeDefinitionBuilderImpl copy = new TypeDefinitionBuilderImpl(old.getQName(), old.getLine());
copy.setPath(old.getPath());
copy.setDefaultValue(old.getDefaultValue());
copy.setUnits(old.getUnits());
return copy;
}
- public static UsesNodeBuilder copyUsesNodeBuilder(UsesNodeBuilder old) {
- final UsesNodeBuilder copy = new UsesNodeBuilderImpl(
- old.getGroupingName(), old.getLine());
+ public static UsesNodeBuilder copyUsesNodeBuilder(final UsesNodeBuilder old) {
+ final UsesNodeBuilder copy = new UsesNodeBuilderImpl(old.getGroupingName(), old.getLine());
for (AugmentationSchemaBuilder augment : old.getAugmentations()) {
copy.addAugment(augment);
}
return copy;
}
- private static void copyConstraints(final DataSchemaNodeBuilder oldBuilder,
+ private static void copyDataSchemaNodeArgs(final DataSchemaNodeBuilder oldBuilder,
+ final DataSchemaNodeBuilder newBuilder) {
+ newBuilder.setPath(oldBuilder.getPath());
+ newBuilder.setDescription(oldBuilder.getDescription());
+ newBuilder.setReference(oldBuilder.getReference());
+ newBuilder.setStatus(oldBuilder.getStatus());
+ newBuilder.setAugmenting(oldBuilder.isAugmenting());
+ if (!(oldBuilder instanceof ChoiceCaseNode)) {
+ newBuilder.setConfiguration(oldBuilder.isConfiguration());
+ }
+ }
+
+ /**
+ * Copy constraints from old builder to new builder.
+ *
+ * @param oldBuilder
+ * @param newBuilder
+ */
+ private static void copyConstraintsFromBuilder(final DataSchemaNodeBuilder oldBuilder,
final DataSchemaNodeBuilder newBuilder) {
final ConstraintsBuilder oldConstraints = oldBuilder.getConstraints();
final ConstraintsBuilder newConstraints = newBuilder.getConstraints();
newConstraints.setMaxElements(oldConstraints.getMaxElements());
}
+ /**
+ * Create LeafSchemaNodeBuilder from given LeafSchemaNode.
+ *
+ * @param leaf
+ * leaf from which to create builder
+ * @param line
+ * line in module
+ * @return builder object from leaf
+ */
+ public static LeafSchemaNodeBuilder createLeafBuilder(LeafSchemaNode leaf, int line) {
+ final LeafSchemaNodeBuilder builder = new LeafSchemaNodeBuilder(leaf.getQName(), line);
+ convertDataSchemaNode(leaf, builder);
+ final TypeDefinition<?> type = leaf.getType();
+ builder.setType(type);
+ builder.setPath(leaf.getPath());
+ builder.setUnknownNodes(leaf.getUnknownSchemaNodes());
+ builder.setDefaultStr(leaf.getDefault());
+ builder.setUnits(leaf.getUnits());
+ return builder;
+ }
+
+ public static ContainerSchemaNodeBuilder createContainer(ContainerSchemaNode container, int line) {
+ final ContainerSchemaNodeBuilder builder = new ContainerSchemaNodeBuilder(container.getQName(), line);
+ convertDataSchemaNode(container, builder);
+ builder.setUnknownNodes(container.getUnknownSchemaNodes());
+ builder.setChildNodes(container.getChildNodes());
+ builder.setGroupings(container.getGroupings());
+ builder.setTypedefs(container.getTypeDefinitions());
+ builder.setAugmentations(container.getAvailableAugmentations());
+ builder.setUsesnodes(container.getUses());
+ builder.setPresence(container.isPresenceContainer());
+ return builder;
+ }
+
+ /**
+ * Create ListSchemaNodeBuilder from given ListSchemaNode.
+ *
+ * @param list
+ *            list from which to create builder
+ * @param line
+ *            line in module
+ * @return builder object from list
+ */
+ public static ListSchemaNodeBuilder createList(ListSchemaNode list, int line) {
+ ListSchemaNodeBuilder builder = new ListSchemaNodeBuilder(list.getQName(), line);
+ convertDataSchemaNode(list, builder);
+ builder.setUnknownNodes(list.getUnknownSchemaNodes());
+ builder.setTypedefs(list.getTypeDefinitions());
+ builder.setChildNodes(list.getChildNodes());
+ builder.setGroupings(list.getGroupings());
+ builder.setAugmentations(list.getAvailableAugmentations());
+ builder.setUsesnodes(list.getUses());
+ // Read the ordering flag from the source node, not from the freshly
+ // created builder (which would always yield the builder's default).
+ builder.setUserOrdered(list.isUserOrdered());
+ return builder;
+ }
+
+ public static LeafListSchemaNodeBuilder createLeafList(LeafListSchemaNode leafList, int line) {
+ final LeafListSchemaNodeBuilder builder = new LeafListSchemaNodeBuilder(leafList.getQName(), line);
+ convertDataSchemaNode(leafList, builder);
+ builder.setType(leafList.getType());
+ builder.setUnknownNodes(leafList.getUnknownSchemaNodes());
+ builder.setUserOrdered(leafList.isUserOrdered());
+ return builder;
+ }
+
+ public static ChoiceBuilder createChoice(ChoiceNode choice, int line) {
+ final ChoiceBuilder builder = new ChoiceBuilder(choice.getQName(), line);
+ convertDataSchemaNode(choice, builder);
+ builder.setCases(choice.getCases());
+ builder.setUnknownNodes(choice.getUnknownSchemaNodes());
+ builder.setDefaultCase(choice.getDefaultCase());
+ return builder;
+ }
+
+ public static AnyXmlBuilder createAnyXml(AnyXmlSchemaNode anyxml, int line) {
+ final AnyXmlBuilder builder = new AnyXmlBuilder(anyxml.getQName(), line);
+ convertDataSchemaNode(anyxml, builder);
+ builder.setUnknownNodes(anyxml.getUnknownSchemaNodes());
+ return builder;
+ }
+
+ public static GroupingBuilder createGrouping(GroupingDefinition grouping, int line) {
+ final GroupingBuilderImpl builder = new GroupingBuilderImpl(grouping.getQName(), line);
+ builder.setPath(grouping.getPath());
+ builder.setChildNodes(grouping.getChildNodes());
+ builder.setGroupings(grouping.getGroupings());
+ builder.setTypedefs(grouping.getTypeDefinitions());
+ builder.setUsesnodes(grouping.getUses());
+ builder.setUnknownNodes(grouping.getUnknownSchemaNodes());
+ builder.setDescription(grouping.getDescription());
+ builder.setReference(grouping.getReference());
+ builder.setStatus(grouping.getStatus());
+ return builder;
+ }
+
+ /**
+ * Create TypeDefinitionBuilder from given ExtendedType.
+ *
+ * @param typedef
+ *            extended type from which to create builder
+ * @param line
+ *            line in module
+ * @return builder object from typedef
+ */
+ public static TypeDefinitionBuilder createTypedef(ExtendedType typedef, int line) {
+ final TypeDefinitionBuilderImpl builder = new TypeDefinitionBuilderImpl(typedef.getQName(), line);
+ builder.setPath(typedef.getPath());
+ builder.setDefaultValue(typedef.getDefaultValue());
+ builder.setUnits(typedef.getUnits());
+ builder.setDescription(typedef.getDescription());
+ builder.setReference(typedef.getReference());
+ builder.setStatus(typedef.getStatus());
+ builder.setRanges(typedef.getRanges());
+ builder.setLengths(typedef.getLengths());
+ builder.setPatterns(typedef.getPatterns());
+ builder.setFractionDigits(typedef.getFractionDigits());
+ builder.setType(typedef.getBaseType());
+ builder.setUnknownNodes(typedef.getUnknownSchemaNodes());
+ return builder;
+ }
+
+ /**
+ * Set DataSchemaNode arguments to builder object
+ *
+ * @param node
+ * node from which arguments should be read
+ * @param builder
+ * builder to which arguments should be set
+ */
+ private static void convertDataSchemaNode(DataSchemaNode node, DataSchemaNodeBuilder builder) {
+ builder.setPath(node.getPath());
+ builder.setDescription(node.getDescription());
+ builder.setReference(node.getReference());
+ builder.setStatus(node.getStatus());
+ builder.setAugmenting(node.isAugmenting());
+ if (!(node instanceof ChoiceCaseNode)) {
+ builder.setConfiguration(node.isConfiguration());
+ }
+ copyConstraintsFromDefinition(node.getConstraints(), builder.getConstraints());
+ }
+
+ /**
+ * Copy constraints from constraints definition to constraints builder.
+ *
+ * @param nodeConstraints
+ * definition from which constraints will be copied
+ * @param constraints
+ * builder to which constraints will be added
+ */
+ private static void copyConstraintsFromDefinition(final ConstraintDefinition nodeConstraints,
+ final ConstraintsBuilder constraints) {
+ final RevisionAwareXPath when = nodeConstraints.getWhenCondition();
+ final Set<MustDefinition> must = nodeConstraints.getMustConstraints();
+
+ if (when != null) {
+ constraints.addWhenCondition(when.toString());
+ }
+ if (must != null) {
+ for (MustDefinition md : must) {
+ constraints.addMustDefinition(md);
+ }
+ }
+ constraints.setMandatory(nodeConstraints.isMandatory());
+ constraints.setMinElements(nodeConstraints.getMinElements());
+ constraints.setMaxElements(nodeConstraints.getMaxElements());
+ }
+
+ public static void processAugmentationOnContext(final AugmentationSchemaBuilder augmentBuilder,
+ final List<QName> path, final ModuleBuilder module, final String prefix, final int line,
+ final SchemaContext context) {
+ final Module dependentModule = findModuleFromContext(context, module, prefix, line);
+ if (dependentModule == null) {
+ throw new YangParseException(module.getName(), line, "Failed to find referenced module with prefix "
+ + prefix + ".");
+ }
+ SchemaNode node = dependentModule.getDataChildByName(path.get(0).getLocalName());
+ if (node == null) {
+ Set<NotificationDefinition> notifications = dependentModule.getNotifications();
+ for (NotificationDefinition ntf : notifications) {
+ if (ntf.getQName().getLocalName().equals(path.get(0).getLocalName())) {
+ node = ntf;
+ break;
+ }
+ }
+ }
+ if (node == null) {
+ return;
+ }
+
+ for (int i = 1; i < path.size(); i++) {
+ if (node instanceof DataNodeContainer) {
+ DataNodeContainer ref = (DataNodeContainer) node;
+ node = ref.getDataChildByName(path.get(i).getLocalName());
+ }
+ }
+ if (node == null) {
+ return;
+ }
+
+ if (node instanceof ContainerSchemaNodeImpl) {
+ // includes container, input and output statement
+ ContainerSchemaNodeImpl c = (ContainerSchemaNodeImpl) node;
+ ContainerSchemaNodeBuilder cb = c.toBuilder();
+ fillAugmentTarget(augmentBuilder, cb);
+ ((AugmentationTargetBuilder) cb).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = cb.getPath();
+ cb.rebuild();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ } else if (node instanceof ListSchemaNodeImpl) {
+ ListSchemaNodeImpl l = (ListSchemaNodeImpl) node;
+ ListSchemaNodeBuilder lb = l.toBuilder();
+ fillAugmentTarget(augmentBuilder, lb);
+ ((AugmentationTargetBuilder) lb).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = lb.getPath();
+ lb.rebuild();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ } else if (node instanceof ChoiceNodeImpl) {
+ ChoiceNodeImpl ch = (ChoiceNodeImpl) node;
+ ChoiceBuilder chb = ch.toBuilder();
+ fillAugmentTarget(augmentBuilder, chb);
+ ((AugmentationTargetBuilder) chb).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = chb.getPath();
+ chb.rebuild();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ } else if (node instanceof ChoiceCaseNodeImpl) {
+ ChoiceCaseNodeImpl chc = (ChoiceCaseNodeImpl) node;
+ ChoiceCaseBuilder chcb = chc.toBuilder();
+ fillAugmentTarget(augmentBuilder, chcb);
+ ((AugmentationTargetBuilder) chcb).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = chcb.getPath();
+ chcb.rebuild();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ } else if (node instanceof NotificationDefinitionImpl) {
+ NotificationDefinitionImpl nd = (NotificationDefinitionImpl) node;
+ NotificationBuilder nb = nd.toBuilder();
+ fillAugmentTarget(augmentBuilder, nb);
+ ((AugmentationTargetBuilder) nb).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = nb.getPath();
+ nb.rebuild();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ } else {
+ throw new YangParseException(module.getName(), line, "Target of type " + node.getClass()
+ + " can not be augmented.");
+ }
+ }
+
+ public static void processAugmentation(final AugmentationSchemaBuilder augmentBuilder, final List<QName> path,
+ final ModuleBuilder module, final QName qname, final ModuleBuilder dependentModuleBuilder) {
+ DataSchemaNodeBuilder currentParent = null;
+ for (DataSchemaNodeBuilder child : dependentModuleBuilder.getChildNodes()) {
+ final QName childQName = child.getQName();
+ if (childQName.getLocalName().equals(qname.getLocalName())) {
+ currentParent = child;
+ break;
+ }
+ }
+
+ if (currentParent == null) {
+ return;
+ }
+
+ for (int i = 1; i < path.size(); i++) {
+ final QName currentQName = path.get(i);
+ DataSchemaNodeBuilder newParent = null;
+ for (DataSchemaNodeBuilder child : ((DataNodeContainerBuilder) currentParent).getChildNodes()) {
+ final QName childQName = child.getQName();
+ if (childQName.getLocalName().equals(currentQName.getLocalName())) {
+ newParent = child;
+ break;
+ }
+ }
+ if (newParent == null) {
+ break; // node not found, quit search
+ } else {
+ currentParent = newParent;
+ }
+ }
+
+ final String currentName = currentParent.getQName().getLocalName();
+ final String lastAugmentPathElementName = path.get(path.size() - 1).getLocalName();
+ if (currentName.equals(lastAugmentPathElementName)) {
+
+ if (currentParent instanceof ChoiceBuilder) {
+ fillAugmentTarget(augmentBuilder, (ChoiceBuilder) currentParent);
+ } else {
+ fillAugmentTarget(augmentBuilder, (DataNodeContainerBuilder) currentParent);
+ }
+ ((AugmentationTargetBuilder) currentParent).addAugmentation(augmentBuilder);
+ SchemaPath oldPath = currentParent.getPath();
+ augmentBuilder.setTargetPath(new SchemaPath(oldPath.getPath(), oldPath.isAbsolute()));
+ augmentBuilder.setResolved(true);
+ module.augmentResolved();
+ }
+ }
+
+ /**
+ * Create new type builder based on old type with new base type.
+ *
+ * @param newBaseType
+ * new base type builder
+ * @param oldExtendedType
+ * old type
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @param line
+ * current line in module
+ * @return new type builder based on old type with new base type
+ */
+ public static TypeDefinitionBuilder extendedTypeWithNewBaseTypeBuilder(final TypeDefinitionBuilder newBaseType,
+ final ExtendedType oldExtendedType, final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder module, final int line) {
+ final TypeConstraints tc = new TypeConstraints(module.getName(), line);
+ tc.addFractionDigits(oldExtendedType.getFractionDigits());
+ tc.addLengths(oldExtendedType.getLengths());
+ tc.addPatterns(oldExtendedType.getPatterns());
+ tc.addRanges(oldExtendedType.getRanges());
+
+ final TypeConstraints constraints = findConstraintsFromTypeBuilder(newBaseType, tc, modules, module, null);
+ final TypeDefinitionBuilderImpl newType = new TypeDefinitionBuilderImpl(oldExtendedType.getQName(), line);
+ newType.setTypedef(newBaseType);
+ newType.setPath(oldExtendedType.getPath());
+ newType.setDescription(oldExtendedType.getDescription());
+ newType.setReference(oldExtendedType.getReference());
+ newType.setStatus(oldExtendedType.getStatus());
+ newType.setLengths(constraints.getLength());
+ newType.setPatterns(constraints.getPatterns());
+ newType.setRanges(constraints.getRange());
+ newType.setFractionDigits(constraints.getFractionDigits());
+ newType.setUnits(oldExtendedType.getUnits());
+ newType.setDefaultValue(oldExtendedType.getDefaultValue());
+ newType.setUnknownNodes(oldExtendedType.getUnknownSchemaNodes());
+ return newType;
+ }
+
+ /**
+ * Create new type builder based on old type with new base type.
+ *
+ * @param newBaseType
+ * new base type
+ * @param oldExtendedType
+ * old type
+ * @param modules
+ * all loaded modules
+ * @param module
+ * current module
+ * @param line
+ * current line in module
+ * @return new type builder based on old type with new base type
+ */
+ public static TypeDefinitionBuilder extendedTypeWithNewBaseType(final TypeDefinition<?> newBaseType,
+ final ExtendedType oldExtendedType, final ModuleBuilder module, final int line) {
+ final TypeConstraints tc = new TypeConstraints(module.getName(), line);
+
+ final TypeConstraints constraints = findConstraintsFromTypeDefinition(newBaseType, tc);
+ final TypeDefinitionBuilderImpl newType = new TypeDefinitionBuilderImpl(oldExtendedType.getQName(), line);
+ newType.setType(newBaseType);
+ newType.setPath(oldExtendedType.getPath());
+ newType.setDescription(oldExtendedType.getDescription());
+ newType.setReference(oldExtendedType.getReference());
+ newType.setStatus(oldExtendedType.getStatus());
+ newType.setLengths(constraints.getLength());
+ newType.setPatterns(constraints.getPatterns());
+ newType.setRanges(constraints.getRange());
+ newType.setFractionDigits(constraints.getFractionDigits());
+ newType.setUnits(oldExtendedType.getUnits());
+ newType.setDefaultValue(oldExtendedType.getDefaultValue());
+ newType.setUnknownNodes(oldExtendedType.getUnknownSchemaNodes());
+ return newType;
+ }
+
+ /**
+ * Pull restrictions from type and add them to constraints.
+ *
+ * @param typeToResolve
+ * type from which constraints will be read
+ * @param constraints
+ * constraints object to which constraints will be added
+ * @return constraints object containing constraints from given
+ * type
+ */
+ private static TypeConstraints findConstraintsFromTypeDefinition(final TypeDefinition<?> typeToResolve,
+ final TypeConstraints constraints) {
+ // union type cannot be restricted
+ if (typeToResolve instanceof UnionTypeDefinition) {
+ return constraints;
+ }
+ if (typeToResolve instanceof ExtendedType) {
+ ExtendedType extType = (ExtendedType) typeToResolve;
+ constraints.addFractionDigits(extType.getFractionDigits());
+ constraints.addLengths(extType.getLengths());
+ constraints.addPatterns(extType.getPatterns());
+ constraints.addRanges(extType.getRanges());
+ return findConstraintsFromTypeDefinition(extType.getBaseType(), constraints);
+ } else {
+ mergeConstraints(typeToResolve, constraints);
+ return constraints;
+ }
+ }
+
+ public static TypeConstraints findConstraintsFromTypeBuilder(final TypeAwareBuilder nodeToResolve,
+ final TypeConstraints constraints, final Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ final ModuleBuilder builder, final SchemaContext context) {
+
+ // union type cannot be restricted
+ if (nodeToResolve instanceof UnionTypeBuilder) {
+ return constraints;
+ }
+
+ if (nodeToResolve instanceof TypeDefinitionBuilder) {
+ TypeDefinitionBuilder typedefToResolve = (TypeDefinitionBuilder) nodeToResolve;
+ constraints.addFractionDigits(typedefToResolve.getFractionDigits());
+ constraints.addLengths(typedefToResolve.getLengths());
+ constraints.addPatterns(typedefToResolve.getPatterns());
+ constraints.addRanges(typedefToResolve.getRanges());
+ }
+
+ TypeDefinition<?> type = nodeToResolve.getType();
+ if (type == null) {
+ return findConstraintsFromTypeBuilder(nodeToResolve.getTypedef(), constraints, modules, builder, context);
+ } else {
+ QName qname = type.getQName();
+ if (type instanceof UnknownType) {
+ ModuleBuilder dependentModuleBuilder = findDependentModuleBuilder(modules, builder, qname.getPrefix(),
+ nodeToResolve.getLine());
+ if (dependentModuleBuilder == null) {
+ if (context == null) {
+ throw new YangParseException(builder.getName(), nodeToResolve.getLine(),
+ "Failed to resolved type constraints.");
+ }
+ Module dm = findModuleFromContext(context, builder, qname.getPrefix(), nodeToResolve.getLine());
+ TypeDefinition<?> t = findTypeByName(dm.getTypeDefinitions(), qname.getLocalName());
+ if (t instanceof ExtendedType) {
+ ExtendedType extType = (ExtendedType) t;
+ constraints.addFractionDigits(extType.getFractionDigits());
+ constraints.addLengths(extType.getLengths());
+ constraints.addPatterns(extType.getPatterns());
+ constraints.addRanges(extType.getRanges());
+ return constraints;
+ } else {
+ mergeConstraints(t, constraints);
+ return constraints;
+ }
+ } else {
+ TypeDefinitionBuilder tdb = findTypeDefinitionBuilder(nodeToResolve.getPath(),
+ dependentModuleBuilder, qname.getLocalName(), builder.getName(), nodeToResolve.getLine());
+ return findConstraintsFromTypeBuilder(tdb, constraints, modules, dependentModuleBuilder, context);
+ }
+ } else if (type instanceof ExtendedType) {
+ ExtendedType extType = (ExtendedType) type;
+ constraints.addFractionDigits(extType.getFractionDigits());
+ constraints.addLengths(extType.getLengths());
+ constraints.addPatterns(extType.getPatterns());
+ constraints.addRanges(extType.getRanges());
+
+ TypeDefinition<?> base = extType.getBaseType();
+ if (base instanceof UnknownType) {
+ ModuleBuilder dependentModule = findDependentModuleBuilder(modules, builder, base.getQName()
+ .getPrefix(), nodeToResolve.getLine());
+ TypeDefinitionBuilder tdb = findTypeDefinitionBuilder(nodeToResolve.getPath(), dependentModule,
+ base.getQName().getLocalName(), builder.getName(), nodeToResolve.getLine());
+ return findConstraintsFromTypeBuilder(tdb, constraints, modules, dependentModule, context);
+ } else {
+ // it has to be base yang type
+ mergeConstraints(type, constraints);
+ return constraints;
+ }
+ } else {
+ // it is base yang type
+ mergeConstraints(type, constraints);
+ return constraints;
+ }
+ }
+ }
+
+ /**
+ * Search for type definition builder by name.
+ *
+ * @param dirtyNodeSchemaPath
+ * schema path of node which contains unresolved type
+ * @param dependentModule
+ * module which should contain referenced type
+ * @param typeName
+ * name of type definition
+ * @param currentModuleName
+ * name of current module
+ * @param line
+ * current line in yang model
+ * @return builder of the referenced type definition; never null (an
+ *         exception is thrown if the type is not found)
+ */
+ public static TypeDefinitionBuilder findTypeDefinitionBuilder(final SchemaPath dirtyNodeSchemaPath,
+ final ModuleBuilder dependentModule, final String typeName, final String currentModuleName, final int line) {
+ final List<QName> path = dirtyNodeSchemaPath.getPath();
+ TypeDefinitionBuilder result = null;
+
+ Set<TypeDefinitionBuilder> typedefs = dependentModule.getModuleTypedefs();
+ result = findTypedefBuilderByName(typedefs, typeName);
+
+ if (result == null) {
+ Builder currentNode = null;
+ final List<String> currentPath = new ArrayList<String>();
+ currentPath.add(dependentModule.getName());
+
+ for (int i = 0; i < path.size(); i++) {
+ QName qname = path.get(i);
+ currentPath.add(qname.getLocalName());
+ currentNode = dependentModule.getModuleNode(currentPath);
+
+ if (currentNode instanceof RpcDefinitionBuilder) {
+ typedefs = ((RpcDefinitionBuilder) currentNode).getTypeDefinitions();
+ } else if (currentNode instanceof DataNodeContainerBuilder) {
+ typedefs = ((DataNodeContainerBuilder) currentNode).getTypeDefinitions();
+ } else {
+ typedefs = Collections.emptySet();
+ }
+
+ result = findTypedefBuilderByName(typedefs, typeName);
+ if (result != null) {
+ break;
+ }
+ }
+ }
+
+ if (result != null) {
+ return result;
+ }
+ throw new YangParseException(currentModuleName, line, "Referenced type '" + typeName + "' not found.");
+ }
+
}
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.parser.util;
+
+import static org.opendaylight.controller.yang.parser.util.ParserUtils.*;
+
+import java.lang.reflect.Method;
+import java.util.List;
+
+import org.opendaylight.controller.yang.model.api.AnyXmlSchemaNode;
+import org.opendaylight.controller.yang.model.api.ChoiceNode;
+import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
+import org.opendaylight.controller.yang.model.api.LeafListSchemaNode;
+import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
+import org.opendaylight.controller.yang.model.api.ListSchemaNode;
+import org.opendaylight.controller.yang.model.api.MustDefinition;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
+import org.opendaylight.controller.yang.parser.builder.api.Builder;
+import org.opendaylight.controller.yang.parser.builder.api.GroupingBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.SchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.TypeDefinitionBuilder;
+import org.opendaylight.controller.yang.parser.builder.api.UsesNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.AnyXmlBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ChoiceBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ContainerSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.LeafListSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.LeafSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.ListSchemaNodeBuilder;
+import org.opendaylight.controller.yang.parser.builder.impl.TypeDefinitionBuilderImpl;
+import org.opendaylight.controller.yang.parser.builder.impl.UnknownSchemaNodeBuilder;
+
+/**
+ * Utility class with helper methods to perform operations tied to refine
+ * process.
+ */
+public class RefineUtils {
+
+ private RefineUtils() {
+ }
+
+ /**
+ * Find original builder of node to refine and return copy of this builder.
+ * <p>
+ * We must create and use a copy of builder to preserve original builder
+ * state, because this object will be refined (modified) and later added to
+ * {@link UsesNodeBuilder}.
+ * </p>
+ *
+ * @param targetGrouping
+ * builder of grouping which should contain node to refine
+ * @param refine
+ * refine object containing information about refine
+ * @param moduleName
+ * current module name
+ * @return copy of the builder of the node to refine
+ */
+ public static SchemaNodeBuilder getRefineNodeFromGroupingBuilder(final GroupingBuilder targetGrouping,
+ final RefineHolder refine, final String moduleName) {
+ Builder result = null;
+ final Builder lookedUpBuilder = findRefineTargetBuilder(targetGrouping, refine.getName());
+ if (lookedUpBuilder instanceof LeafSchemaNodeBuilder) {
+ result = copyLeafBuilder((LeafSchemaNodeBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof ContainerSchemaNodeBuilder) {
+ result = copyContainerBuilder((ContainerSchemaNodeBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof ListSchemaNodeBuilder) {
+ result = copyListBuilder((ListSchemaNodeBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof LeafListSchemaNodeBuilder) {
+ result = copyLeafListBuilder((LeafListSchemaNodeBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof ChoiceBuilder) {
+ result = copyChoiceBuilder((ChoiceBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof AnyXmlBuilder) {
+ result = copyAnyXmlBuilder((AnyXmlBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof GroupingBuilder) {
+ result = copyGroupingBuilder((GroupingBuilder) lookedUpBuilder);
+ } else if (lookedUpBuilder instanceof TypeDefinitionBuilder) {
+ result = copyTypedefBuilder((TypeDefinitionBuilderImpl) lookedUpBuilder);
+ } else {
+ throw new YangParseException(moduleName, refine.getLine(), "Target '" + refine.getName()
+ + "' can not be refined");
+ }
+ return (SchemaNodeBuilder) result;
+ }
+
+ /**
+ * Create builder object from refine target node.
+ *
+ * @param grouping
+ * grouping which should contains node to refine
+ * @param refine
+ * refine object containing informations about refine
+ * @param moduleName
+ * current module name
+ * @return
+ */
+ public static SchemaNodeBuilder getRefineNodeFromGroupingDefinition(final GroupingDefinition grouping,
+ final RefineHolder refine, final String moduleName) {
+ SchemaNodeBuilder result = null;
+ final int line = refine.getLine();
+ final Object lookedUpNode = findRefineTargetNode(grouping, refine.getName());
+ if (lookedUpNode instanceof LeafSchemaNode) {
+ result = createLeafBuilder((LeafSchemaNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof ContainerSchemaNode) {
+ result = createContainer((ContainerSchemaNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof ListSchemaNode) {
+ result = createList((ListSchemaNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof LeafListSchemaNode) {
+ result = createLeafList((LeafListSchemaNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof ChoiceNode) {
+ result = createChoice((ChoiceNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof AnyXmlSchemaNode) {
+ result = createAnyXml((AnyXmlSchemaNode) lookedUpNode, line);
+ } else if (lookedUpNode instanceof GroupingDefinition) {
+ result = createGrouping((GroupingDefinition) lookedUpNode, line);
+ } else if (lookedUpNode instanceof TypeDefinition) {
+ result = createTypedef((ExtendedType) lookedUpNode, line);
+ } else {
+ throw new YangParseException(moduleName, line, "Target '" + refine.getName() + "' can not be refined");
+ }
+ return result;
+ }
+
+ public static void refineLeaf(LeafSchemaNodeBuilder leaf, RefineHolder refine, int line) {
+ String defaultStr = refine.getDefaultStr();
+ Boolean mandatory = refine.isMandatory();
+ MustDefinition must = refine.getMust();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (defaultStr != null && !("".equals(defaultStr))) {
+ leaf.setDefaultStr(defaultStr);
+ }
+ if (mandatory != null) {
+ leaf.getConstraints().setMandatory(mandatory);
+ }
+ if (must != null) {
+ leaf.getConstraints().addMustDefinition(must);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ leaf.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ public static void refineContainer(ContainerSchemaNodeBuilder container, RefineHolder refine, int line) {
+ Boolean presence = refine.isPresence();
+ MustDefinition must = refine.getMust();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (presence != null) {
+ container.setPresence(presence);
+ }
+ if (must != null) {
+ container.getConstraints().addMustDefinition(must);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ container.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ public static void refineList(ListSchemaNodeBuilder list, RefineHolder refine, int line) {
+ MustDefinition must = refine.getMust();
+ Integer min = refine.getMinElements();
+ Integer max = refine.getMaxElements();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (must != null) {
+ list.getConstraints().addMustDefinition(must);
+ }
+ if (min != null) {
+ list.getConstraints().setMinElements(min);
+ }
+ if (max != null) {
+ list.getConstraints().setMaxElements(max);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ list.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ public static void refineLeafList(LeafListSchemaNodeBuilder leafList, RefineHolder refine, int line) {
+ MustDefinition must = refine.getMust();
+ Integer min = refine.getMinElements();
+ Integer max = refine.getMaxElements();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (must != null) {
+ leafList.getConstraints().addMustDefinition(must);
+ }
+ if (min != null) {
+ leafList.getConstraints().setMinElements(min);
+ }
+ if (max != null) {
+ leafList.getConstraints().setMaxElements(max);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ leafList.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ public static void refineChoice(ChoiceBuilder choice, RefineHolder refine, int line) {
+ String defaultStr = refine.getDefaultStr();
+ Boolean mandatory = refine.isMandatory();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (defaultStr != null) {
+ choice.setDefaultCase(defaultStr);
+ }
+ if (mandatory != null) {
+ choice.getConstraints().setMandatory(mandatory);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ choice.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ public static void refineAnyxml(AnyXmlBuilder anyXml, RefineHolder refine, int line) {
+ Boolean mandatory = refine.isMandatory();
+ MustDefinition must = refine.getMust();
+ List<UnknownSchemaNodeBuilder> unknownNodes = refine.getUnknownNodes();
+
+ if (mandatory != null) {
+ anyXml.getConstraints().setMandatory(mandatory);
+ }
+ if (must != null) {
+ anyXml.getConstraints().addMustDefinition(must);
+ }
+ if (unknownNodes != null) {
+ for (UnknownSchemaNodeBuilder unknown : unknownNodes) {
+ anyXml.addUnknownSchemaNode(unknown);
+ }
+ }
+ }
+
+ /**
+ * Check if refine can be performed on given node.
+ *
+ * @param node
+ * node to refine
+ * @param refine
+ * refine object containing information about refine process
+ */
+ public static void checkRefine(SchemaNodeBuilder node, RefineHolder refine) {
+ String name = node.getQName().getLocalName();
+ int line = refine.getLine();
+
+ String defaultStr = refine.getDefaultStr();
+ Boolean mandatory = refine.isMandatory();
+ Boolean presence = refine.isPresence();
+ MustDefinition must = refine.getMust();
+ Integer min = refine.getMinElements();
+ Integer max = refine.getMaxElements();
+
+ if (node instanceof AnyXmlBuilder) {
+ checkRefineDefault(node, defaultStr, line);
+ checkRefinePresence(node, presence, line);
+ checkRefineMinMax(name, line, min, max);
+ } else if (node instanceof ChoiceBuilder) {
+ checkRefinePresence(node, presence, line);
+ checkRefineMust(node, must, line);
+ checkRefineMinMax(name, line, min, max);
+ } else if (node instanceof ContainerSchemaNodeBuilder) {
+ checkRefineDefault(node, defaultStr, line);
+ checkRefineMandatory(node, mandatory, line);
+ checkRefineMust(node, must, line);
+ checkRefineMinMax(name, line, min, max);
+ } else if (node instanceof LeafSchemaNodeBuilder) {
+ checkRefinePresence(node, presence, line);
+ checkRefineMinMax(name, line, min, max);
+ } else if (node instanceof LeafListSchemaNodeBuilder || node instanceof ListSchemaNodeBuilder) {
+ checkRefineDefault(node, defaultStr, line);
+ checkRefinePresence(node, presence, line);
+ checkRefineMandatory(node, mandatory, line);
+ } else if (node instanceof GroupingBuilder || node instanceof TypeDefinitionBuilder
+ || node instanceof UsesNodeBuilder) {
+ checkRefineDefault(node, defaultStr, line);
+ checkRefinePresence(node, presence, line);
+ checkRefineMandatory(node, mandatory, line);
+ checkRefineMust(node, must, line);
+ checkRefineMinMax(name, line, min, max);
+ }
+ }
+
+ private static void checkRefineDefault(SchemaNodeBuilder node, String defaultStr, int line) {
+ if (defaultStr != null) {
+ throw new YangParseException(line, "Can not refine 'default' for '" + node.getQName().getLocalName() + "'.");
+ }
+ }
+
+ private static void checkRefineMandatory(SchemaNodeBuilder node, Boolean mandatory, int line) {
+ if (mandatory != null) {
+ throw new YangParseException(line, "Can not refine 'mandatory' for '" + node.getQName().getLocalName()
+ + "'.");
+ }
+ }
+
+ private static void checkRefinePresence(SchemaNodeBuilder node, Boolean presence, int line) {
+ if (presence != null) {
+ throw new YangParseException(line, "Can not refine 'presence' for '" + node.getQName().getLocalName()
+ + "'.");
+ }
+ }
+
+ private static void checkRefineMust(SchemaNodeBuilder node, MustDefinition must, int line) {
+ if (must != null) {
+ throw new YangParseException(line, "Can not refine 'must' for '" + node.getQName().getLocalName() + "'.");
+ }
+ }
+
+ private static void checkRefineMinMax(String refineTargetName, int refineLine, Integer min, Integer max) {
+ if (min != null || max != null) {
+ throw new YangParseException(refineLine, "Can not refine 'min-elements' or 'max-elements' for '"
+ + refineTargetName + "'.");
+ }
+ }
+
+ /**
+ * Perform refine operation of following parameters:
+ * <ul>
+ * <li>description</li>
+ * <li>reference</li>
+ * <li>config</li>
+ * </ul>
+ *
+ * These parameters may be refined for any node.
+ *
+ * @param node
+ * node to refine
+ * @param refine
+ * refine object containing information about refine process
+ * @param line
+ * current line in yang model
+ */
+ public static void refineDefault(final Builder node, final RefineHolder refine, final int line) {
+ Class<? extends Builder> cls = node.getClass();
+
+ String description = refine.getDescription();
+ if (description != null) {
+ try {
+ Method method = cls.getDeclaredMethod("setDescription", String.class);
+ method.invoke(node, description);
+ } catch (Exception e) {
+ throw new YangParseException(line, "Cannot refine description in " + cls.getName(), e);
+ }
+ }
+
+ String reference = refine.getReference();
+ if (reference != null) {
+ try {
+ Method method = cls.getDeclaredMethod("setReference", String.class);
+ method.invoke(node, reference);
+ } catch (Exception e) {
+ throw new YangParseException(line, "Cannot refine reference in " + cls.getName(), e);
+ }
+ }
+
+ Boolean config = refine.isConfig();
+ if (config != null) {
+ try {
+ Method method = cls.getDeclaredMethod("setConfiguration", Boolean.TYPE);
+ method.invoke(node, config);
+ } catch (Exception e) {
+ throw new YangParseException(line, "Cannot refine config in " + cls.getName(), e);
+ }
+ }
+ }
+
+ /**
+ * Perform refine operation on given node.
+ *
+ * @param nodeToRefine
+ * builder of node to refine
+ * @param refine
+ * refine object containing information about refine process
+ * @param line
+ * current line in yang model
+ */
+ public static void performRefine(SchemaNodeBuilder nodeToRefine, RefineHolder refine, int line) {
+ checkRefine(nodeToRefine, refine);
+ refineDefault(nodeToRefine, refine, line);
+ if (nodeToRefine instanceof LeafSchemaNodeBuilder) {
+ refineLeaf((LeafSchemaNodeBuilder) nodeToRefine, refine, line);
+ } else if (nodeToRefine instanceof ContainerSchemaNodeBuilder) {
+ refineContainer((ContainerSchemaNodeBuilder) nodeToRefine, refine, line);
+ } else if (nodeToRefine instanceof ListSchemaNodeBuilder) {
+ refineList((ListSchemaNodeBuilder) nodeToRefine, refine, line);
+ } else if (nodeToRefine instanceof LeafListSchemaNodeBuilder) {
+ refineLeafList((LeafListSchemaNodeBuilder) nodeToRefine, refine, line);
+ } else if (nodeToRefine instanceof ChoiceBuilder) {
+ refineChoice((ChoiceBuilder) nodeToRefine, refine, line);
+ } else if (nodeToRefine instanceof AnyXmlBuilder) {
+ refineAnyxml((AnyXmlBuilder) nodeToRefine, refine, line);
+ }
+ }
+
+}
private static List<PatternConstraint> getPatternConstraint(final Type_body_stmtsContext ctx) {
List<PatternConstraint> patterns = new ArrayList<PatternConstraint>();
- outer: for (int j = 0; j < ctx.getChildCount(); j++) {
+ for (int j = 0; j < ctx.getChildCount(); j++) {
ParseTree stringRestrChild = ctx.getChild(j);
if (stringRestrChild instanceof String_restrictionsContext) {
for (int k = 0; k < stringRestrChild.getChildCount(); k++) {
ParseTree lengthChild = stringRestrChild.getChild(k);
if (lengthChild instanceof Pattern_stmtContext) {
patterns.add(parsePatternConstraint((Pattern_stmtContext) lengthChild));
- if (k == lengthChild.getChildCount() - 1) {
- break outer;
- }
}
}
}
import org.opendaylight.controller.yang.common.QName;
import org.opendaylight.controller.yang.model.api.Module;
import org.opendaylight.controller.yang.model.api.ModuleImport;
+import org.opendaylight.controller.yang.model.api.SchemaContext;
import org.opendaylight.controller.yang.model.api.SchemaPath;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.parser.api.YangModelParser;
return modules.iterator().next();
}
+ /**
+ * Parse a single YANG stream against an already resolved schema context and
+ * return the resulting module. The stream is closed after a successful parse.
+ * NOTE(review): the stream is not closed if the parser throws, and an empty
+ * parse result makes iterator().next() fail — confirm callers accept this.
+ */
+ public static Module loadModuleWithContext(final InputStream stream, final SchemaContext context) throws IOException {
+ final YangModelParser parser = new YangParserImpl();
+ final List<InputStream> input = Collections.singletonList(stream);
+ final Set<Module> modules = new HashSet<Module>(parser.parseYangModelsFromStreams(input, context));
+ stream.close();
+ return modules.iterator().next();
+ }
+
+ /**
+ * Parse several YANG streams against an already resolved schema context.
+ * Null entries in the input list are tolerated when closing.
+ * NOTE(review): streams are closed only after a successful parse; a parser
+ * exception leaks them — confirm this is acceptable for test usage.
+ */
+ public static Set<Module> loadModulesWithContext(final List<InputStream> input, final SchemaContext context) throws IOException {
+ final YangModelParser parser = new YangParserImpl();
+ final Set<Module> modules = new HashSet<Module>(parser.parseYangModelsFromStreams(input, context));
+ for(InputStream is : input) {
+ if(is != null) {
+ is.close();
+ }
+ }
+ return modules;
+ }
+
public static Module findModule(Set<Module> modules, String moduleName) {
Module result = null;
for (Module module : modules) {
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.parser.impl;
+
+import static org.junit.Assert.*;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.junit.Test;
+import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.controller.yang.model.api.DataSchemaNode;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
+import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;
+import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
+import org.opendaylight.controller.yang.model.api.ListSchemaNode;
+import org.opendaylight.controller.yang.model.api.Module;
+import org.opendaylight.controller.yang.model.api.MustDefinition;
+import org.opendaylight.controller.yang.model.api.SchemaContext;
+import org.opendaylight.controller.yang.model.api.SchemaNode;
+import org.opendaylight.controller.yang.model.api.SchemaPath;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.controller.yang.model.api.UsesNode;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Tests parsing of YANG modules against a pre-resolved SchemaContext:
+ * type resolution, uses/refine, identities, unknown nodes and augments whose
+ * targets live in the context. Expected values mirror the fixture files under
+ * /context-test, /context-augment-test and /types.
+ */
+public class YangParserWithContextTest {
+ private final DateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+ private final YangParserImpl parser = new YangParserImpl();
+
+ @Test
+ public void testTypeFromContext() throws Exception {
+ // NOTE(review): unlike the other tests, this one does not use
+ // try-with-resources; the streams leak if parsing throws.
+ SchemaContext context = null;
+ String resource = "/types/ietf-inet-types@2010-09-24.yang";
+ InputStream stream = new FileInputStream(getClass().getResource(resource).getPath());
+ context = parser.resolveSchemaContext(TestUtils.loadModules(Lists.newArrayList(stream)));
+ stream.close();
+
+ Module module = null;
+ resource = "/context-test/test1.yang";
+ InputStream stream2 = new FileInputStream(getClass().getResource(resource).getPath());
+ module = TestUtils.loadModuleWithContext(stream2, context);
+ stream2.close();
+ assertNotNull(module);
+
+ // leaf 'id' uses port-number imported from ietf-inet-types in the context
+ LeafSchemaNode leaf = (LeafSchemaNode) module.getDataChildByName("id");
+
+ ExtendedType leafType = (ExtendedType) leaf.getType();
+ QName qname = leafType.getQName();
+ assertEquals(URI.create("urn:simple.demo.test1"), qname.getNamespace());
+ assertEquals(simpleDateFormat.parse("2013-06-18"), qname.getRevision());
+ assertEquals("t1", qname.getPrefix());
+ assertEquals("port-number", qname.getLocalName());
+
+ // base type must come from the context module, not the parsed one
+ ExtendedType leafBaseType = (ExtendedType) leafType.getBaseType();
+ qname = leafBaseType.getQName();
+ assertEquals(URI.create("urn:ietf:params:xml:ns:yang:ietf-inet-types"), qname.getNamespace());
+ assertEquals(simpleDateFormat.parse("2010-09-24"), qname.getRevision());
+ assertEquals("inet", qname.getPrefix());
+ assertEquals("port-number", qname.getLocalName());
+
+ // dscp-ext restricts inet:dscp with "min..max", i.e. 0..63
+ ExtendedType dscpExt = (ExtendedType)TestUtils.findTypedef(module.getTypeDefinitions(), "dscp-ext");
+ List<RangeConstraint> ranges = dscpExt.getRanges();
+ assertEquals(1, ranges.size());
+ RangeConstraint range = ranges.get(0);
+ assertEquals(0L, range.getMin());
+ assertEquals(63L, range.getMax());
+
+ }
+
+ @Test
+ public void testUsesGroupingFromContext() throws Exception {
+ SchemaContext context = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/model/testfile2.yang").getPath())) {
+ context = parser.resolveSchemaContext(TestUtils.loadModules(Lists.newArrayList(stream)));
+ }
+ Module module = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/context-test/test2.yang").getPath())) {
+ module = TestUtils.loadModuleWithContext(stream, context);
+ }
+ assertNotNull(module);
+
+ ContainerSchemaNode peer = (ContainerSchemaNode) module.getDataChildByName("peer");
+ ContainerSchemaNode destination = (ContainerSchemaNode) peer.getDataChildByName("destination");
+ Set<UsesNode> usesNodes = destination.getUses();
+ assertEquals(1, usesNodes.size());
+ UsesNode usesNode = usesNodes.iterator().next();
+
+ // test grouping path
+ List<QName> path = new ArrayList<QName>();
+ QName qname = new QName(URI.create("urn:simple.types.data.demo"), simpleDateFormat.parse("2013-02-27"), "t2",
+ "target");
+ path.add(qname);
+ SchemaPath expectedPath = new SchemaPath(path, true);
+ assertEquals(expectedPath, usesNode.getGroupingPath());
+
+ // test refine — one entry per refine statement in test2.yang
+ Map<SchemaPath, SchemaNode> refines = usesNode.getRefines();
+ assertEquals(5, refines.size());
+
+ LeafSchemaNode refineLeaf = null;
+ ContainerSchemaNode refineContainer = null;
+ ListSchemaNode refineList = null;
+ GroupingDefinition refineGrouping = null;
+ TypeDefinition<?> typedef = null;
+ for (Map.Entry<SchemaPath, SchemaNode> entry : refines.entrySet()) {
+ SchemaNode value = entry.getValue();
+ if (value instanceof LeafSchemaNode) {
+ refineLeaf = (LeafSchemaNode) value;
+ } else if (value instanceof ContainerSchemaNode) {
+ refineContainer = (ContainerSchemaNode) value;
+ } else if (value instanceof ListSchemaNode) {
+ refineList = (ListSchemaNode) value;
+ } else if (value instanceof GroupingDefinition) {
+ refineGrouping = (GroupingDefinition) value;
+ } else if (value instanceof TypeDefinition<?>) {
+ typedef = (TypeDefinition<?>) value;
+ }
+ }
+
+ // leaf address
+ assertNotNull(refineLeaf);
+ assertEquals("address", refineLeaf.getQName().getLocalName());
+ assertEquals("description of address defined by refine", refineLeaf.getDescription());
+ assertEquals("address reference added by refine", refineLeaf.getReference());
+ assertFalse(refineLeaf.isConfiguration());
+ assertTrue(refineLeaf.getConstraints().isMandatory());
+ Set<MustDefinition> leafMustConstraints = refineLeaf.getConstraints().getMustConstraints();
+ assertEquals(1, leafMustConstraints.size());
+ MustDefinition leafMust = leafMustConstraints.iterator().next();
+ assertEquals("\"ifType != 'ethernet' or (ifType = 'ethernet' and ifMTU = 1500)\"", leafMust.toString());
+
+ // container port
+ assertNotNull(refineContainer);
+ Set<MustDefinition> mustConstraints = refineContainer.getConstraints().getMustConstraints();
+ assertTrue(mustConstraints.isEmpty());
+ assertEquals("description of port defined by refine", refineContainer.getDescription());
+ assertEquals("port reference added by refine", refineContainer.getReference());
+ assertFalse(refineContainer.isConfiguration());
+ assertTrue(refineContainer.isPresenceContainer());
+
+ // list addresses
+ assertNotNull(refineList);
+ assertEquals("description of addresses defined by refine", refineList.getDescription());
+ assertEquals("addresses reference added by refine", refineList.getReference());
+ assertFalse(refineList.isConfiguration());
+ assertEquals(2, (int) refineList.getConstraints().getMinElements());
+ assertEquals(12, (int) refineList.getConstraints().getMaxElements());
+
+ // grouping target-inner
+ assertNotNull(refineGrouping);
+ Set<DataSchemaNode> refineGroupingChildren = refineGrouping.getChildNodes();
+ assertEquals(1, refineGroupingChildren.size());
+ LeafSchemaNode refineGroupingLeaf = (LeafSchemaNode) refineGroupingChildren.iterator().next();
+ assertEquals("inner-grouping-id", refineGroupingLeaf.getQName().getLocalName());
+ assertEquals("new target-inner grouping description", refineGrouping.getDescription());
+
+ // typedef group-type
+ assertNotNull(typedef);
+ assertEquals("new group-type description", typedef.getDescription());
+ assertEquals("new group-type reference", typedef.getReference());
+ assertTrue(typedef.getBaseType() instanceof ExtendedType);
+ }
+
+ @Test
+ public void testIdentity() throws Exception {
+ SchemaContext context = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/types/custom-types-test@2012-4-4.yang")
+ .getPath())) {
+ context = parser.resolveSchemaContext(TestUtils.loadModules(Lists.newArrayList(stream)));
+ }
+ Module module = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/context-test/test3.yang").getPath())) {
+ module = TestUtils.loadModuleWithContext(stream, context);
+ }
+ assertNotNull(module);
+
+ Set<IdentitySchemaNode> identities = module.getIdentities();
+ assertEquals(1, identities.size());
+
+ IdentitySchemaNode identity = identities.iterator().next();
+ QName idQName = identity.getQName();
+ assertEquals(URI.create("urn:simple.demo.test3"), idQName.getNamespace());
+ assertEquals(simpleDateFormat.parse("2013-06-18"), idQName.getRevision());
+ assertEquals("t3", idQName.getPrefix());
+ assertEquals("pt", idQName.getLocalName());
+
+ // base identity must resolve to 'service-type' from the context module
+ IdentitySchemaNode baseIdentity = identity.getBaseIdentity();
+ QName idBaseQName = baseIdentity.getQName();
+ assertEquals(URI.create("urn:simple.container.demo"), idBaseQName.getNamespace());
+ assertEquals(simpleDateFormat.parse("2012-04-16"), idBaseQName.getRevision());
+ assertEquals("iit", idBaseQName.getPrefix());
+ assertEquals("service-type", idBaseQName.getLocalName());
+ }
+
+ @Test
+ public void testUnknownNodes() throws Exception {
+ SchemaContext context = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/types/custom-types-test@2012-4-4.yang").getPath())) {
+ context = parser.resolveSchemaContext(TestUtils.loadModules(Lists.newArrayList(stream)));
+ }
+
+ Module module = null;
+ try (InputStream stream = new FileInputStream(getClass().getResource("/context-test/test3.yang").getPath())) {
+ module = TestUtils.loadModuleWithContext(stream, context);
+ }
+
+ // 'custom:mountpoint point' extension usage inside container 'network'
+ ContainerSchemaNode network = (ContainerSchemaNode) module.getDataChildByName("network");
+ List<UnknownSchemaNode> unknownNodes = network.getUnknownSchemaNodes();
+ assertEquals(1, unknownNodes.size());
+
+ UnknownSchemaNode un = unknownNodes.iterator().next();
+ QName unType = un.getNodeType();
+ assertEquals(URI.create("urn:simple.container.demo"), unType.getNamespace());
+ assertEquals(simpleDateFormat.parse("2012-04-16"), unType.getRevision());
+ assertEquals("custom", unType.getPrefix());
+ assertEquals("mountpoint", unType.getLocalName());
+ assertEquals("point", un.getNodeParameter());
+ }
+
+ @Test
+ public void testAugment() throws Exception {
+ // load first module
+ SchemaContext context = null;
+ String resource = "/context-augment-test/test4.yang";
+
+ try (InputStream stream = new FileInputStream(getClass().getResource(resource).getPath())) {
+ context = parser.resolveSchemaContext(TestUtils.loadModules(Lists.newArrayList(stream)));
+ }
+
+ Set<Module> contextModules = context.getModules();
+ Module t3 = TestUtils.findModule(contextModules, "test4");
+ ContainerSchemaNode interfaces = (ContainerSchemaNode) t3.getDataChildByName("interfaces");
+ ListSchemaNode ifEntry = (ListSchemaNode) interfaces.getDataChildByName("ifEntry");
+
+ // load another modules and parse them against already existing context
+ Set<Module> modules = null;
+ try (InputStream stream1 = new FileInputStream(getClass().getResource("/context-augment-test/test1.yang")
+ .getPath());
+ InputStream stream2 = new FileInputStream(getClass().getResource("/context-augment-test/test2.yang")
+ .getPath());
+ InputStream stream3 = new FileInputStream(getClass().getResource("/context-augment-test/test3.yang")
+ .getPath())) {
+ List<InputStream> input = Lists.newArrayList(stream1, stream2, stream3);
+ modules = TestUtils.loadModulesWithContext(input, context);
+ }
+ assertNotNull(modules);
+
+ // test augmentation process: test3 adds augment-holder, test2 fills it,
+ // test1 adds leaf 'id' under the nested 'schemas' container
+ ContainerSchemaNode augmentHolder = (ContainerSchemaNode) ifEntry.getDataChildByName("augment-holder");
+ assertNotNull(augmentHolder);
+ DataSchemaNode ds0 = augmentHolder.getDataChildByName("ds0ChannelNumber");
+ assertNotNull(ds0);
+ DataSchemaNode interfaceId = augmentHolder.getDataChildByName("interface-id");
+ assertNotNull(interfaceId);
+ DataSchemaNode higherLayerIf = augmentHolder.getDataChildByName("higher-layer-if");
+ assertNotNull(higherLayerIf);
+ ContainerSchemaNode schemas = (ContainerSchemaNode) augmentHolder.getDataChildByName("schemas");
+ assertNotNull(schemas);
+ assertNotNull(schemas.getDataChildByName("id"));
+
+ // test augment target after augmentation: check if it is same instance
+ ListSchemaNode ifEntryAfterAugment = (ListSchemaNode) interfaces.getDataChildByName("ifEntry");
+ assertTrue(ifEntry == ifEntryAfterAugment);
+ }
+
+}
--- /dev/null
+module test1 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test1";
+ prefix "t1";
+
+ import test3 {
+ prefix "t3";
+ revision-date 2013-06-18;
+ }
+
+ import test2 {
+ prefix "t2";
+ revision-date 2013-06-18;
+ }
+
+ import test4 {
+ prefix "t4";
+ revision-date 2013-06-18;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ augment "/t4:interfaces/t4:ifEntry/t2:augment-holder/t3:schemas" {
+ when "if:ifType='ds0'";
+ leaf id {
+ type string;
+ }
+ }
+
+}
--- /dev/null
+module test2 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test2";
+ prefix "t2";
+
+ import test3 {
+ prefix "t3";
+ revision-date 2013-06-18;
+ }
+
+ import test4 {
+ prefix "t4";
+ revision-date 2013-06-18;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ augment "/t4:interfaces/t4:ifEntry/t3:augment-holder" {
+ when "if:ifType='ds0'";
+ leaf ds0ChannelNumber {
+ type string;
+ }
+ leaf interface-id {
+ type leafref {
+ path "/if:interfaces/if:interface/if:name";
+ }
+ }
+ leaf-list higher-layer-if {
+ type leafref {
+ path "/if:interfaces/if:interface/if:higher-layer-if";
+ }
+ }
+ container schemas {
+ }
+ }
+
+}
--- /dev/null
+module test3 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test3";
+ prefix "t3";
+
+ import test4 {
+ prefix "t4";
+ revision-date 2013-06-18;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ augment "/t4:interfaces/t4:ifEntry" {
+ when "if:ifType='ds0'";
+ container augment-holder {
+ description "Description for augment holder";
+ }
+ }
+
+}
--- /dev/null
+module test4 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test4";
+ prefix "t4";
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ container interfaces {
+ list ifEntry {
+ key "ifIndex";
+ leaf ifIndex {
+ type uint32;
+ units minutes;
+ }
+ leaf ifMtu {
+ type int32;
+ }
+ min-elements 1;
+ max-elements 11;
+ }
+ }
+
+}
--- /dev/null
+module test1 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test1";
+ prefix "t1";
+
+ import ietf-inet-types {
+ prefix "inet";
+ revision-date 2010-09-24;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ typedef dscp-ext {
+ type inet:dscp {
+ range "min..max";
+ }
+ }
+
+ leaf id {
+ type inet:port-number {
+ range "0..65536";
+ }
+ }
+
+}
--- /dev/null
+module test2 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test2";
+ prefix "t2";
+
+ import types2 {
+ prefix "data";
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ container peer {
+ container destination {
+ uses data:target {
+ refine address {
+ default "1.2.3.4";
+ description "description of address defined by refine";
+ reference "address reference added by refine";
+ config false;
+ mandatory true;
+ must "ifType != 'ethernet' or " +
+ "(ifType = 'ethernet' and ifMTU = 1500)" {
+ error-message "An ethernet MTU must be 1500";
+ }
+ }
+ refine port {
+ description "description of port defined by refine";
+ reference "port reference added by refine";
+ config false;
+ presence "presence is required";
+ }
+ refine addresses {
+ description "description of addresses defined by refine";
+ reference "addresses reference added by refine";
+ config false;
+ min-elements 2;
+ max-elements 12;
+ }
+ refine target-inner {
+ description "new target-inner grouping description";
+ }
+ refine group-type {
+ description "new group-type description";
+ reference "new group-type reference";
+ }
+ }
+ }
+ }
+
+}
--- /dev/null
+module test3 {
+
+ yang-version 1;
+ namespace "urn:simple.demo.test3";
+ prefix "t3";
+
+ import custom-types-test {
+ prefix "custom";
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ revision 2013-06-18 {
+ }
+
+ identity pt {
+ base custom:service-type;
+ }
+
+ container network {
+ custom:mountpoint point {
+ mnt:target-ref target;
+ }
+
+ description "network-description";
+ reference "network-reference";
+ status obsolete;
+ config true;
+ presence "some presence text";
+ }
+
+}
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-ietf</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>ietf-inet-types</artifactId>
+ <version>2010.09.24-SNAPSHOT</version>
+</project>
\ No newline at end of file
--- /dev/null
+ module ietf-inet-types {
+
+ namespace "urn:ietf:params:xml:ns:yang:ietf-inet-types";
+ prefix "inet";
+
+ organization
+ "IETF NETMOD (NETCONF Data Modeling Language) Working Group";
+
+ contact
+ "WG Web: <http://tools.ietf.org/wg/netmod/>
+ WG List: <mailto:netmod@ietf.org>
+
+ WG Chair: David Partain
+ <mailto:david.partain@ericsson.com>
+
+ WG Chair: David Kessens
+ <mailto:david.kessens@nsn.com>
+
+ Editor: Juergen Schoenwaelder
+ <mailto:j.schoenwaelder@jacobs-university.de>";
+
+ description
+ "This module contains a collection of generally useful derived
+ YANG data types for Internet addresses and related things.
+
+ Copyright (c) 2010 IETF Trust and the persons identified as
+ authors of the code. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, is permitted pursuant to, and subject to the license
+ terms contained in, the Simplified BSD License set forth in Section
+ 4.c of the IETF Trust's Legal Provisions Relating to IETF Documents
+ (http://trustee.ietf.org/license-info).
+
+ This version of this YANG module is part of RFC 6021; see
+ the RFC itself for full legal notices.";
+
+ revision 2010-09-24 {
+ description
+ "Initial revision.";
+ reference
+ "RFC 6021: Common YANG Data Types";
+ }
+
+ /*** collection of protocol field related types ***/
+
+ typedef ip-version {
+ type enumeration {
+ enum unknown {
+ value "0";
+ description
+ "An unknown or unspecified version of the Internet protocol.";
+ }
+ enum ipv4 {
+ value "1";
+ description
+ "The IPv4 protocol as defined in RFC 791.";
+ }
+ enum ipv6 {
+ value "2";
+ description
+ "The IPv6 protocol as defined in RFC 2460.";
+ }
+ }
+ description
+ "This value represents the version of the IP protocol.
+
+ In the value set and its semantics, this type is equivalent
+ to the InetVersion textual convention of the SMIv2.";
+ reference
+ "RFC 791: Internet Protocol
+ RFC 2460: Internet Protocol, Version 6 (IPv6) Specification
+ RFC 4001: Textual Conventions for Internet Network Addresses";
+ }
+
+ typedef dscp {
+ type uint8 {
+ range "0..63";
+ }
+ description
+ "The dscp type represents a Differentiated Services Code-Point
+ that may be used for marking packets in a traffic stream.
+
+ In the value set and its semantics, this type is equivalent
+ to the Dscp textual convention of the SMIv2.";
+ reference
+ "RFC 3289: Management Information Base for the Differentiated
+ Services Architecture
+ RFC 2474: Definition of the Differentiated Services Field
+ (DS Field) in the IPv4 and IPv6 Headers
+ RFC 2780: IANA Allocation Guidelines For Values In
+ the Internet Protocol and Related Headers";
+ }
+
+ typedef ipv6-flow-label {
+ type uint32 {
+ range "0..1048575";
+ }
+ description
+ "The flow-label type represents flow identifier or Flow Label
+ in an IPv6 packet header that may be used to discriminate
+ traffic flows.
+
+ In the value set and its semantics, this type is equivalent
+ to the IPv6FlowLabel textual convention of the SMIv2.";
+ reference
+ "RFC 3595: Textual Conventions for IPv6 Flow Label
+ RFC 2460: Internet Protocol, Version 6 (IPv6) Specification";
+ }
+
+ typedef port-number {
+ type uint16 {
+ range "0..65535";
+ }
+ description
+ "The port-number type represents a 16-bit port number of an
+ Internet transport layer protocol such as UDP, TCP, DCCP, or
+ SCTP. Port numbers are assigned by IANA. A current list of
+ all assignments is available from <http://www.iana.org/>.
+
+ Note that the port number value zero is reserved by IANA. In
+ situations where the value zero does not make sense, it can
+ be excluded by subtyping the port-number type.
+
+ In the value set and its semantics, this type is equivalent
+ to the InetPortNumber textual convention of the SMIv2.";
+ reference
+ "RFC 768: User Datagram Protocol
+ RFC 793: Transmission Control Protocol
+ RFC 4960: Stream Control Transmission Protocol
+ RFC 4340: Datagram Congestion Control Protocol (DCCP)
+ RFC 4001: Textual Conventions for Internet Network Addresses";
+ }
+
+ /*** collection of autonomous system related types ***/
+
+ typedef as-number {
+ type uint32;
+ description
+ "The as-number type represents autonomous system numbers
+ which identify an Autonomous System (AS). An AS is a set
+ of routers under a single technical administration, using
+ an interior gateway protocol and common metrics to route
+ packets within the AS, and using an exterior gateway
+ protocol to route packets to other ASs'. IANA maintains
+ the AS number space and has delegated large parts to the
+ regional registries.
+
+ Autonomous system numbers were originally limited to 16
+ bits. BGP extensions have enlarged the autonomous system
+ number space to 32 bits. This type therefore uses an uint32
+ base type without a range restriction in order to support
+ a larger autonomous system number space.
+
+ In the value set and its semantics, this type is equivalent
+ to the InetAutonomousSystemNumber textual convention of
+ the SMIv2.";
+ reference
+ "RFC 1930: Guidelines for creation, selection, and registration
+ of an Autonomous System (AS)
+ RFC 4271: A Border Gateway Protocol 4 (BGP-4)
+ RFC 4893: BGP Support for Four-octet AS Number Space
+ RFC 4001: Textual Conventions for Internet Network Addresses";
+ }
+
+ /*** collection of IP address and hostname related types ***/
+
+ typedef ip-address {
+ type union {
+ type inet:ipv4-address;
+ type inet:ipv6-address;
+ }
+ description
+ "The ip-address type represents an IP address and is IP
+ version neutral. The format of the textual representations
+ implies the IP version.";
+ }
+
+ typedef ipv4-address {
+ type string {
+ pattern
+ '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}'
+ + '([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'
+ + '(%[\p{N}\p{L}]+)?';
+ }
+ description
+ "The ipv4-address type represents an IPv4 address in
+ dotted-quad notation. The IPv4 address may include a zone
+ index, separated by a % sign.
+
+ The zone index is used to disambiguate identical address
+ values. For link-local addresses, the zone index will
+ typically be the interface index number or the name of an
+ interface. If the zone index is not present, the default
+ zone of the device will be used.
+
+ The canonical format for the zone index is the numerical
+ format";
+ }
+
+ typedef ipv6-address {
+ type string {
+ pattern '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}'
+ + '((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|'
+ + '(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\.){3}'
+ + '(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))'
+ + '(%[\p{N}\p{L}]+)?';
+ pattern '(([^:]+:){6}(([^:]+:[^:]+)|(.*\..*)))|'
+ + '((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?)'
+ + '(%.+)?';
+ }
+ description
+ "The ipv6-address type represents an IPv6 address in full,
+ mixed, shortened, and shortened-mixed notation. The IPv6
+ address may include a zone index, separated by a % sign.
+
+ The zone index is used to disambiguate identical address
+ values. For link-local addresses, the zone index will
+ typically be the interface index number or the name of an
+ interface. If the zone index is not present, the default
+ zone of the device will be used.
+
+ The canonical format of IPv6 addresses uses the compressed
+ format described in RFC 4291, Section 2.2, item 2 with the
+ following additional rules: the :: substitution must be
+ applied to the longest sequence of all-zero 16-bit chunks
+ in an IPv6 address. If there is a tie, the first sequence
+ of all-zero 16-bit chunks is replaced by ::. Single
+ all-zero 16-bit chunks are not compressed. The canonical
+ format uses lowercase characters and leading zeros are
+ not allowed. The canonical format for the zone index is
+ the numerical format as described in RFC 4007, Section
+ 11.2.";
+ reference
+ "RFC 4291: IP Version 6 Addressing Architecture
+ RFC 4007: IPv6 Scoped Address Architecture
+ RFC 5952: A Recommendation for IPv6 Address Text Representation";
+ }
+
+ typedef ip-prefix {
+ type union {
+ type inet:ipv4-prefix;
+ type inet:ipv6-prefix;
+ }
+ description
+ "The ip-prefix type represents an IP prefix and is IP
+ version neutral. The format of the textual representations
+ implies the IP version.";
+ }
+
+ typedef ipv4-prefix {
+ type string {
+ pattern
+ '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}'
+ + '([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'
+ + '/(([0-9])|([1-2][0-9])|(3[0-2]))';
+ }
+ description
+ "The ipv4-prefix type represents an IPv4 address prefix.
+ The prefix length is given by the number following the
+ slash character and must be less than or equal to 32.
+
+ A prefix length value of n corresponds to an IP address
+ mask that has n contiguous 1-bits from the most
+ significant bit (MSB) and all other bits set to 0.
+
+ The canonical format of an IPv4 prefix has all bits of
+ the IPv4 address set to zero that are not part of the
+ IPv4 prefix.";
+ }
+
+ typedef ipv6-prefix {
+ type string {
+ pattern '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}'
+ + '((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|'
+ + '(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\.){3}'
+ + '(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))'
+ + '(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))';
+ pattern '(([^:]+:){6}(([^:]+:[^:]+)|(.*\..*)))|'
+ + '((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?)'
+ + '(/.+)';
+ }
+ description
+ "The ipv6-prefix type represents an IPv6 address prefix.
+ The prefix length is given by the number following the
+ slash character and must be less than or equal 128.
+
+ A prefix length value of n corresponds to an IP address
+ mask that has n contiguous 1-bits from the most
+ significant bit (MSB) and all other bits set to 0.
+
+ The IPv6 address should have all bits that do not belong
+ to the prefix set to zero.
+
+ The canonical format of an IPv6 prefix has all bits of
+ the IPv6 address set to zero that are not part of the
+ IPv6 prefix. Furthermore, IPv6 address is represented
+ in the compressed format described in RFC 4291, Section
+ 2.2, item 2 with the following additional rules: the ::
+ substitution must be applied to the longest sequence of
+ all-zero 16-bit chunks in an IPv6 address. If there is
+ a tie, the first sequence of all-zero 16-bit chunks is
+ replaced by ::. Single all-zero 16-bit chunks are not
+ compressed. The canonical format uses lowercase
+ characters and leading zeros are not allowed.";
+ reference
+ "RFC 4291: IP Version 6 Addressing Architecture";
+ }
+
+ /*** collection of domain name and URI types ***/
+
+ typedef domain-name {
+ type string {
+ pattern '((([a-zA-Z0-9_]([a-zA-Z0-9\-_]){0,61})?[a-zA-Z0-9]\.)*'
+ + '([a-zA-Z0-9_]([a-zA-Z0-9\-_]){0,61})?[a-zA-Z0-9]\.?)'
+ + '|\.';
+ length "1..253";
+ }
+ description
+ "The domain-name type represents a DNS domain name. The
+ name SHOULD be fully qualified whenever possible.
+
+ Internet domain names are only loosely specified. Section
+ 3.5 of RFC 1034 recommends a syntax (modified in Section
+ 2.1 of RFC 1123). The pattern above is intended to allow
+ for current practice in domain name use, and some possible
+ future expansion. It is designed to hold various types of
+ domain names, including names used for A or AAAA records
+ (host names) and other records, such as SRV records. Note
+ that Internet host names have a stricter syntax (described
+ in RFC 952) than the DNS recommendations in RFCs 1034 and
+ 1123, and that systems that want to store host names in
+ schema nodes using the domain-name type are recommended to
+ adhere to this stricter standard to ensure interoperability.
+
+ The encoding of DNS names in the DNS protocol is limited
+ to 255 characters. Since the encoding consists of labels
+ prefixed by a length bytes and there is a trailing NULL
+ byte, only 253 characters can appear in the textual dotted
+ notation.
+
+ The description clause of schema nodes using the domain-name
+ type MUST describe when and how these names are resolved to
+ IP addresses. Note that the resolution of a domain-name value
+ may require to query multiple DNS records (e.g., A for IPv4
+ and AAAA for IPv6). The order of the resolution process and
+ which DNS record takes precedence can either be defined
+         explicitly or it may depend on the configuration of the
+ resolver.
+
+ Domain-name values use the US-ASCII encoding. Their canonical
+ format uses lowercase US-ASCII characters. Internationalized
+ domain names MUST be encoded in punycode as described in RFC
+ 3492";
+ reference
+ "RFC 952: DoD Internet Host Table Specification
+ RFC 1034: Domain Names - Concepts and Facilities
+ RFC 1123: Requirements for Internet Hosts -- Application
+ and Support
+ RFC 2782: A DNS RR for specifying the location of services
+ (DNS SRV)
+ RFC 3492: Punycode: A Bootstring encoding of Unicode for
+ Internationalized Domain Names in Applications
+ (IDNA)
+ RFC 5891: Internationalizing Domain Names in Applications
+ (IDNA): Protocol";
+ }
+
+ typedef host {
+ type union {
+ type inet:ip-address;
+ type inet:domain-name;
+ }
+ description
+ "The host type represents either an IP address or a DNS
+ domain name.";
+ }
+
+ typedef uri {
+ type string;
+ description
+ "The uri type represents a Uniform Resource Identifier
+ (URI) as defined by STD 66.
+
+ Objects using the uri type MUST be in US-ASCII encoding,
+ and MUST be normalized as described by RFC 3986 Sections
+ 6.2.1, 6.2.2.1, and 6.2.2.2. All unnecessary
+ percent-encoding is removed, and all case-insensitive
+ characters are set to lowercase except for hexadecimal
+ digits, which are normalized to uppercase as described in
+ Section 6.2.2.1.
+
+ The purpose of this normalization is to help provide
+ unique URIs. Note that this normalization is not
+ sufficient to provide uniqueness. Two URIs that are
+ textually distinct after this normalization may still be
+ equivalent.
+
+ Objects using the uri type may restrict the schemes that
+ they permit. For example, 'data:' and 'urn:' schemes
+ might not be appropriate.
+
+ A zero-length URI is not a valid URI. This can be used to
+ express 'URI absent' where required.
+
+ In the value set and its semantics, this type is equivalent
+ to the Uri SMIv2 textual convention defined in RFC 5017.";
+ reference
+ "RFC 3986: Uniform Resource Identifier (URI): Generic Syntax
+ RFC 3305: Report from the Joint W3C/IETF URI Planning Interest
+ Group: Uniform Resource Identifiers (URIs), URLs,
+ and Uniform Resource Names (URNs): Clarifications
+ and Recommendations
+ RFC 5017: MIB Textual Conventions for Uniform Resource
+ Identifiers (URIs)";
+ }
+
+ }
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-ietf</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>ietf-yang-types</artifactId>
+ <version>2010.09.24-SNAPSHOT</version>
+</project>
\ No newline at end of file
--- /dev/null
+ module ietf-yang-types {
+
+ namespace "urn:ietf:params:xml:ns:yang:ietf-yang-types";
+ prefix "yang";
+
+ organization
+ "IETF NETMOD (NETCONF Data Modeling Language) Working Group";
+
+ contact
+ "WG Web: <http://tools.ietf.org/wg/netmod/>
+ WG List: <mailto:netmod@ietf.org>
+
+ WG Chair: David Partain
+ <mailto:david.partain@ericsson.com>
+
+ WG Chair: David Kessens
+ <mailto:david.kessens@nsn.com>
+
+ Editor: Juergen Schoenwaelder
+ <mailto:j.schoenwaelder@jacobs-university.de>";
+
+ description
+ "This module contains a collection of generally useful derived
+ YANG data types.
+
+ Copyright (c) 2010 IETF Trust and the persons identified as
+ authors of the code. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, is permitted pursuant to, and subject to the license
+ terms contained in, the Simplified BSD License set forth in Section
+ 4.c of the IETF Trust's Legal Provisions Relating to IETF Documents
+ (http://trustee.ietf.org/license-info).
+
+ This version of this YANG module is part of RFC 6021; see
+ the RFC itself for full legal notices.";
+
+ revision 2010-09-24 {
+ description
+ "Initial revision.";
+ reference
+ "RFC 6021: Common YANG Data Types";
+ }
+
+ /*** collection of counter and gauge types ***/
+
+ typedef counter32 {
+ type uint32;
+ description
+ "The counter32 type represents a non-negative integer
+ that monotonically increases until it reaches a
+ maximum value of 2^32-1 (4294967295 decimal), when it
+ wraps around and starts increasing again from zero.
+
+ Counters have no defined 'initial' value, and thus, a
+ single value of a counter has (in general) no information
+ content. Discontinuities in the monotonically increasing
+ value normally occur at re-initialization of the
+ management system, and at other times as specified in the
+ description of a schema node using this type. If such
+ other times can occur, for example, the creation of
+ a schema node of type counter32 at times other than
+ re-initialization, then a corresponding schema node
+ should be defined, with an appropriate type, to indicate
+ the last discontinuity.
+
+ The counter32 type should not be used for configuration
+ schema nodes. A default statement SHOULD NOT be used in
+ combination with the type counter32.
+
+ In the value set and its semantics, this type is equivalent
+ to the Counter32 type of the SMIv2.";
+ reference
+ "RFC 2578: Structure of Management Information Version 2 (SMIv2)";
+ }
+
+ typedef zero-based-counter32 {
+ type yang:counter32;
+ default "0";
+ description
+ "The zero-based-counter32 type represents a counter32
+ that has the defined 'initial' value zero.
+
+ A schema node of this type will be set to zero (0) on creation
+ and will thereafter increase monotonically until it reaches
+ a maximum value of 2^32-1 (4294967295 decimal), when it
+ wraps around and starts increasing again from zero.
+
+ Provided that an application discovers a new schema node
+ of this type within the minimum time to wrap, it can use the
+ 'initial' value as a delta. It is important for a management
+ station to be aware of this minimum time and the actual time
+ between polls, and to discard data if the actual time is too
+ long or there is no defined minimum time.
+
+ In the value set and its semantics, this type is equivalent
+ to the ZeroBasedCounter32 textual convention of the SMIv2.";
+ reference
+ "RFC 4502: Remote Network Monitoring Management Information
+ Base Version 2";
+ }
+
+ typedef counter64 {
+ type uint64;
+ description
+ "The counter64 type represents a non-negative integer
+ that monotonically increases until it reaches a
+ maximum value of 2^64-1 (18446744073709551615 decimal),
+ when it wraps around and starts increasing again from zero.
+
+ Counters have no defined 'initial' value, and thus, a
+ single value of a counter has (in general) no information
+ content. Discontinuities in the monotonically increasing
+ value normally occur at re-initialization of the
+ management system, and at other times as specified in the
+ description of a schema node using this type. If such
+ other times can occur, for example, the creation of
+ a schema node of type counter64 at times other than
+ re-initialization, then a corresponding schema node
+ should be defined, with an appropriate type, to indicate
+ the last discontinuity.
+
+ The counter64 type should not be used for configuration
+ schema nodes. A default statement SHOULD NOT be used in
+ combination with the type counter64.
+
+ In the value set and its semantics, this type is equivalent
+ to the Counter64 type of the SMIv2.";
+ reference
+ "RFC 2578: Structure of Management Information Version 2 (SMIv2)";
+ }
+
+ typedef zero-based-counter64 {
+ type yang:counter64;
+ default "0";
+ description
+ "The zero-based-counter64 type represents a counter64 that
+ has the defined 'initial' value zero.
+
+ A schema node of this type will be set to zero (0) on creation
+ and will thereafter increase monotonically until it reaches
+ a maximum value of 2^64-1 (18446744073709551615 decimal),
+ when it wraps around and starts increasing again from zero.
+
+ Provided that an application discovers a new schema node
+ of this type within the minimum time to wrap, it can use the
+ 'initial' value as a delta. It is important for a management
+ station to be aware of this minimum time and the actual time
+ between polls, and to discard data if the actual time is too
+ long or there is no defined minimum time.
+
+ In the value set and its semantics, this type is equivalent
+ to the ZeroBasedCounter64 textual convention of the SMIv2.";
+ reference
+ "RFC 2856: Textual Conventions for Additional High Capacity
+ Data Types";
+ }
+
+ typedef gauge32 {
+ type uint32;
+ description
+ "The gauge32 type represents a non-negative integer, which
+ may increase or decrease, but shall never exceed a maximum
+ value, nor fall below a minimum value. The maximum value
+ cannot be greater than 2^32-1 (4294967295 decimal), and
+ the minimum value cannot be smaller than 0. The value of
+ a gauge32 has its maximum value whenever the information
+ being modeled is greater than or equal to its maximum
+ value, and has its minimum value whenever the information
+ being modeled is smaller than or equal to its minimum value.
+ If the information being modeled subsequently decreases
+ below (increases above) the maximum (minimum) value, the
+ gauge32 also decreases (increases).
+
+ In the value set and its semantics, this type is equivalent
+ to the Gauge32 type of the SMIv2.";
+ reference
+ "RFC 2578: Structure of Management Information Version 2 (SMIv2)";
+ }
+
+ typedef gauge64 {
+ type uint64;
+ description
+ "The gauge64 type represents a non-negative integer, which
+ may increase or decrease, but shall never exceed a maximum
+ value, nor fall below a minimum value. The maximum value
+ cannot be greater than 2^64-1 (18446744073709551615), and
+ the minimum value cannot be smaller than 0. The value of
+ a gauge64 has its maximum value whenever the information
+ being modeled is greater than or equal to its maximum
+ value, and has its minimum value whenever the information
+ being modeled is smaller than or equal to its minimum value.
+ If the information being modeled subsequently decreases
+ below (increases above) the maximum (minimum) value, the
+ gauge64 also decreases (increases).
+
+ In the value set and its semantics, this type is equivalent
+ to the CounterBasedGauge64 SMIv2 textual convention defined
+ in RFC 2856";
+ reference
+ "RFC 2856: Textual Conventions for Additional High Capacity
+ Data Types";
+ }
+
+ /*** collection of identifier related types ***/
+
+ typedef object-identifier {
+ type string {
+ pattern '(([0-1](\.[1-3]?[0-9]))|(2\.(0|([1-9]\d*))))'
+ + '(\.(0|([1-9]\d*)))*';
+ }
+ description
+ "The object-identifier type represents administratively
+ assigned names in a registration-hierarchical-name tree.
+
+ Values of this type are denoted as a sequence of numerical
+ non-negative sub-identifier values. Each sub-identifier
+ value MUST NOT exceed 2^32-1 (4294967295). Sub-identifiers
+ are separated by single dots and without any intermediate
+ whitespace.
+
+ The ASN.1 standard restricts the value space of the first
+ sub-identifier to 0, 1, or 2. Furthermore, the value space
+ of the second sub-identifier is restricted to the range
+ 0 to 39 if the first sub-identifier is 0 or 1. Finally,
+ the ASN.1 standard requires that an object identifier
+ has always at least two sub-identifier. The pattern
+ captures these restrictions.
+
+ Although the number of sub-identifiers is not limited,
+ module designers should realize that there may be
+ implementations that stick with the SMIv2 limit of 128
+ sub-identifiers.
+
+ This type is a superset of the SMIv2 OBJECT IDENTIFIER type
+ since it is not restricted to 128 sub-identifiers. Hence,
+ this type SHOULD NOT be used to represent the SMIv2 OBJECT
+ IDENTIFIER type, the object-identifier-128 type SHOULD be
+ used instead.";
+ reference
+ "ISO9834-1: Information technology -- Open Systems
+ Interconnection -- Procedures for the operation of OSI
+ Registration Authorities: General procedures and top
+ arcs of the ASN.1 Object Identifier tree";
+ }
+
+
+
+
+ typedef object-identifier-128 {
+ type object-identifier {
+ pattern '\d*(\.\d*){1,127}';
+ }
+ description
+ "This type represents object-identifiers restricted to 128
+ sub-identifiers.
+
+ In the value set and its semantics, this type is equivalent
+ to the OBJECT IDENTIFIER type of the SMIv2.";
+ reference
+ "RFC 2578: Structure of Management Information Version 2 (SMIv2)";
+ }
+
+ /*** collection of date and time related types ***/
+
+ typedef date-and-time {
+ type string {
+ pattern '\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?'
+ + '(Z|[\+\-]\d{2}:\d{2})';
+ }
+ description
+ "The date-and-time type is a profile of the ISO 8601
+ standard for representation of dates and times using the
+ Gregorian calendar. The profile is defined by the
+ date-time production in Section 5.6 of RFC 3339.
+
+ The date-and-time type is compatible with the dateTime XML
+ schema type with the following notable exceptions:
+
+ (a) The date-and-time type does not allow negative years.
+
+ (b) The date-and-time time-offset -00:00 indicates an unknown
+ time zone (see RFC 3339) while -00:00 and +00:00 and Z all
+ represent the same time zone in dateTime.
+
+ (c) The canonical format (see below) of data-and-time values
+ differs from the canonical format used by the dateTime XML
+ schema type, which requires all times to be in UTC using the
+ time-offset 'Z'.
+
+ This type is not equivalent to the DateAndTime textual
+ convention of the SMIv2 since RFC 3339 uses a different
+ separator between full-date and full-time and provides
+ higher resolution of time-secfrac.
+
+ The canonical format for date-and-time values with a known time
+ zone uses a numeric time zone offset that is calculated using
+ the device's configured known offset to UTC time. A change of
+ the device's offset to UTC time will cause date-and-time values
+ to change accordingly. Such changes might happen periodically
+ in case a server follows automatically daylight saving time
+ (DST) time zone offset changes. The canonical format for
+ date-and-time values with an unknown time zone (usually referring
+ to the notion of local time) uses the time-offset -00:00.";
+ reference
+ "RFC 3339: Date and Time on the Internet: Timestamps
+ RFC 2579: Textual Conventions for SMIv2
+ XSD-TYPES: XML Schema Part 2: Datatypes Second Edition";
+ }
+
+ typedef timeticks {
+ type uint32;
+ description
+ "The timeticks type represents a non-negative integer that
+ represents the time, modulo 2^32 (4294967296 decimal), in
+ hundredths of a second between two epochs. When a schema
+ node is defined that uses this type, the description of
+ the schema node identifies both of the reference epochs.
+
+ In the value set and its semantics, this type is equivalent
+ to the TimeTicks type of the SMIv2.";
+ reference
+ "RFC 2578: Structure of Management Information Version 2 (SMIv2)";
+ }
+
+ typedef timestamp {
+ type yang:timeticks;
+ description
+ "The timestamp type represents the value of an associated
+ timeticks schema node at which a specific occurrence happened.
+ The specific occurrence must be defined in the description
+ of any schema node defined using this type. When the specific
+ occurrence occurred prior to the last time the associated
+ timeticks attribute was zero, then the timestamp value is
+ zero. Note that this requires all timestamp values to be
+ reset to zero when the value of the associated timeticks
+ attribute reaches 497+ days and wraps around to zero.
+
+ The associated timeticks schema node must be specified
+ in the description of any schema node using this type.
+
+ In the value set and its semantics, this type is equivalent
+ to the TimeStamp textual convention of the SMIv2.";
+ reference
+ "RFC 2579: Textual Conventions for SMIv2";
+ }
+
+ /*** collection of generic address types ***/
+
+ typedef phys-address {
+ type string {
+ pattern '([0-9a-fA-F]{2}(:[0-9a-fA-F]{2})*)?';
+ }
+ description
+ "Represents media- or physical-level addresses represented
+ as a sequence octets, each octet represented by two hexadecimal
+ numbers. Octets are separated by colons. The canonical
+ representation uses lowercase characters.
+
+ In the value set and its semantics, this type is equivalent
+ to the PhysAddress textual convention of the SMIv2.";
+ reference
+ "RFC 2579: Textual Conventions for SMIv2";
+ }
+
+ typedef mac-address {
+ type string {
+ pattern '[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}';
+ }
+ description
+ "The mac-address type represents an IEEE 802 MAC address.
+ The canonical representation uses lowercase characters.
+
+ In the value set and its semantics, this type is equivalent
+ to the MacAddress textual convention of the SMIv2.";
+ reference
+ "IEEE 802: IEEE Standard for Local and Metropolitan Area
+ Networks: Overview and Architecture
+ RFC 2579: Textual Conventions for SMIv2";
+ }
+
+ /*** collection of XML specific types ***/
+
+ typedef xpath1.0 {
+ type string;
+ description
+ "This type represents an XPATH 1.0 expression.
+
+ When a schema node is defined that uses this type, the
+ description of the schema node MUST specify the XPath
+ context in which the XPath expression is evaluated.";
+ reference
+ "XPATH: XML Path Language (XPath) Version 1.0";
+ }
+
+ }
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-parent</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>model-ietf</artifactId>
+ <packaging>pom</packaging>
+ <modules>
+ <module>ietf-inet-types</module>
+ <module>ietf-yang-types</module>
+ </modules>
+</project>
\ No newline at end of file
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-parent</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>model-openflow</artifactId>
+</project>
\ No newline at end of file
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-parent</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>model-topology-bgp</artifactId>
+</project>
\ No newline at end of file
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>model-parent</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>model-topology</artifactId>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>ietf-inet-types</artifactId>
+ <version>2010.09.24-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
--- /dev/null
+module topology {
+ yang-version 1;
+ namespace "urn:ietf:params:xml:ns:yang:topology";
+ prefix "tp";
+
+ import ietf-inet-types { prefix "inet"; }
+
+ organization "TBD";
+
+ contact "WILL-BE-DEFINED-LATER";
+
+ description "";
+
+ revision 2013-06-11 {
+ description "Updated model with review comments.
+ Removed top-level container network.
+ Moved network elements to separate module.";
+ }
+
+ typedef topology-id {
+ type inet:uri;
+ }
+
+ typedef node-id {
+ description "Node abstract identifier, schema for URI will be defined by augmentation";
+ type inet:uri;
+ }
+
+  typedef link-id {
+      description "Link abstract identifier, schema for URI will be defined by augmentation";
+      type inet:uri;
+  }
+
+ typedef tp-id {
+ type inet:uri;
+ description "identifier for termination points on a port";
+ }
+
+ typedef tp-ref {
+ type leafref {
+ path "/topologies/topology/nodes/node/termination-points/termination-point/tp-id";
+ }
+ }
+ typedef topology-ref {
+ type leafref {
+ path "/topologies/topology/topology-id";
+ }
+ description "This type is used for leafs that reference topology identifier instance.";
+ // currently not used
+ }
+
+ typedef node-ref {
+ type leafref {
+ path "/topologies/topology/nodes/node/node-id";
+ }
+ description "This type is used for leafs that reference a node instance.";
+ }
+
+ typedef link-ref {
+ type leafref {
+ path "/topologies/topology/links/link/link-id";
+ }
+ description "This type is used for leafs that reference a link instance.";
+ // currently not used
+ }
+
+ container topologies {
+ list topology {
+      description "
+          This is the model of an abstract topology, which contains only Network
+          Nodes and Network Links. Each topology MUST be identified by a
+          unique topology-id, because the store could contain many
+          topologies.
+          ";
+ key "topology-id";
+ leaf topology-id {
+ type topology-id;
+ description "
+ It is presumed that datastore will contain many topologies. To
+ distinguish between topologies it is vital to have UNIQUE
+ topology identifier.
+ ";
+ }
+
+ container types {
+ description "
+ The container for definition of topology types.
+ The augmenting modules should add empty optional leaf
+ to this container to signalize topology type.
+ ";
+ }
+
+ container nodes {
+ list node {
+ description "The list of network nodes defined for topology.";
+
+ key "node-id";
+ leaf node-id {
+ type node-id;
+ description "The Topology identifier of network-node.";
+ }
+
+ //leaf supporting-ne {
+ // type network-element-ref;
+ //}
+
+ container termination-points {
+ list termination-point {
+ key "tp-id";
+ leaf tp-id {
+ type tp-id;
+ }
+ }
+ }
+ }
+ }
+
+ container links {
+ list link {
+        description "
+          The Network Link, which is defined by its Local (Source) and
+          Remote (Destination) Network Nodes. Every link MUST be
+          defined by an identifier and its local and remote
+          Network Nodes (in real applications it is common that many
+          links originate from one node and end up in the same
+          remote node). To ensure that links can always be
+          distinguished from one another, every link SHOULD have a
+          unique identifier.
+          ";
+ key "link-id";
+
+ leaf link-id {
+ type link-id;
+ description "";
+ }
+
+ container source {
+ leaf source-node {
+ type node-ref;
+ description "Source node identifier.";
+ }
+ leaf source-tp {
+ type tp-ref;
+ }
+ }
+
+ container destination {
+ leaf dest-node {
+ type node-ref;
+ description "Destination node identifier.";
+ }
+ leaf dest-tp {
+ type tp-ref;
+ }
+ }
+ }
+ }
+ }
+ }
+}
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>yang-prototype</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <version>0.5-SNAPSHOT</version>
+ </parent>
+ <artifactId>model-parent</artifactId>
+ <packaging>pom</packaging>
+ <modules>
+ <module>ietf</module>
+ <module>model-topology</module>
+ <module>model-openflow</module>
+ <!--module>model-topology-bgp</module-->
+ </modules>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>yang-maven-plugin</artifactId>
+ <version>0.5.3-SNAPSHOT</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>generate-sources</goal>
+ </goals>
+ <configuration>
+ <yangFilesRootDir>src/main/yang</yangFilesRootDir>
+ <codeGenerators>
+ <generator>
+ <codeGeneratorClass>
+ org.opendaylight.controller.maven.sal.api.gen.plugin.CodeGeneratorImpl
+ </codeGeneratorClass>
+ <outputBaseDir>
+ target/generated-sources/sal
+ </outputBaseDir>
+ </generator>
+ </codeGenerators>
+ <inspectDependencies>true</inspectDependencies>
+ </configuration>
+ </execution>
+ </executions>
+ <dependencies>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>maven-sal-api-gen-plugin</artifactId>
+ <version>0.5.3-SNAPSHOT</version>
+ <type>jar</type>
+ </dependency>
+ </dependencies>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>1.7</version>
+ <executions>
+ <execution>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>target/generated-sources/sal</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ <pluginManagement>
+ <plugins>
+ <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>
+ org.opendaylight.controller
+ </groupId>
+ <artifactId>
+ yang-maven-plugin
+ </artifactId>
+ <versionRange>
+ [0.5,)
+ </versionRange>
+ <goals>
+ <goal>
+ generate-sources
+ </goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>yang-binding</artifactId>
+ <version>0.5.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>yang-common</artifactId>
+ <version>0.5.3-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
<modules>
<module>yang</module>
<module>code-generator</module>
+ <module>model</module>
</modules>
<properties>
<artifactId>yang-prototype</artifactId>
<version>0.5-SNAPSHOT</version>
</parent>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.5.3-SNAPSHOT</version>
<artifactId>yang</artifactId>
<packaging>pom</packaging>
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-binding</artifactId>\r
</project>
\ No newline at end of file
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-common</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-data-api</artifactId>\r
\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-data-util</artifactId>\r
<dependencies>\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-model-api</artifactId>\r
<dependencies>\r
* @return ChoiceCaseNode objects defined in this node
*/
Set<ChoiceCaseNode> getCases();
-
+
String getDefaultCase();
}
\r
List<ExtensionDefinition> getExtensionSchemaNodes();\r
\r
+ List<UnknownSchemaNode> getUnknownSchemaNodes();\r
+\r
}\r
package org.opendaylight.controller.yang.model.api;\r
\r
/**\r
- * Interface describing YANG 'notification' statement. The\r
- * notification statement is used to define a NETCONF notification.\r
+ * Interface describing YANG 'notification' statement. The notification\r
+ * statement is used to define a NETCONF notification.\r
*/\r
-public interface NotificationDefinition extends SchemaNode, DataNodeContainer {\r
+public interface NotificationDefinition extends SchemaNode, DataNodeContainer, AugmentationTarget {\r
\r
}\r
<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>yang</artifactId>\r
- <version>0.5.2-SNAPSHOT</version>\r
+ <version>0.5.3-SNAPSHOT</version>\r
</parent>\r
<artifactId>yang-model-util</artifactId>\r
<dependencies>\r
* interface which represents UNSIGNED Integer values defined in Yang language. <br>
* The integer built-in types in Yang are uint8, uint16, uint32, and uint64.
* They represent unsigned integers of different sizes:
- *
+ *
* <ul>
* <li>uint8 - represents integer values between 0 and 255, inclusively.</li>
* <li>uint16 - represents integer values between 0 and 65535, inclusively.</li>
* <li>uint64 - represents integer values between 0 and 18446744073709551615,
* inclusively.</li>
* </ul>
- *
+ *
*/
public abstract class AbstractUnsignedInteger implements UnsignedIntegerTypeDefinition {
private static final long MIN_VALUE = 0;
private final List<RangeConstraint> rangeStatements;
/**
- *
+ *
* @param actualPath
* @param namespace
* @param revision
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
package org.opendaylight.controller.yang.model.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
-import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
-import org.opendaylight.controller.yang.model.api.DataNodeContainer;
-import org.opendaylight.controller.yang.model.api.DataSchemaNode;
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;
-import org.opendaylight.controller.yang.model.api.LeafListSchemaNode;
-import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
-import org.opendaylight.controller.yang.model.api.ListSchemaNode;
+import org.opendaylight.controller.yang.model.api.*;
public class DataNodeIterator implements Iterator<DataSchemaNode> {
private final DataNodeContainer container;
- private List<ListSchemaNode> allLists;
- private List<ContainerSchemaNode> allContainers;
- private List<LeafSchemaNode> allLeafs;
- private List<LeafListSchemaNode> allLeafLists;
- private List<DataSchemaNode> allChilds;
+ private final List<ListSchemaNode> allLists;
+ private final List<ContainerSchemaNode> allContainers;
+ private final List<ChoiceNode> allChoices;
+ private final List<DataSchemaNode> allChilds;
public DataNodeIterator(final DataNodeContainer container) {
if (container == null) {
throw new IllegalArgumentException("Data Node Container MUST be specified and cannot be NULL!");
}
- init();
+ this.allContainers = new ArrayList<>();
+ this.allLists = new ArrayList<>();
+ this.allChilds = new ArrayList<>();
+ this.allChoices = new ArrayList<>();
+
this.container = container;
traverse(this.container);
}
- private void init() {
- this.allContainers = new ArrayList<ContainerSchemaNode>();
- this.allLists = new ArrayList<ListSchemaNode>();
- this.allLeafs = new ArrayList<LeafSchemaNode>();
- this.allLeafLists = new ArrayList<LeafListSchemaNode>();
- this.allChilds = new ArrayList<DataSchemaNode>();
- }
-
public List<ContainerSchemaNode> allContainers() {
return allContainers;
}
return allLists;
}
- public List<LeafSchemaNode> allLeafs() {
- return allLeafs;
- }
-
- public List<LeafListSchemaNode> allLeafLists() {
- return allLeafLists;
+ public List<ChoiceNode> allChoices() {
+ return allChoices;
}
private void traverse(final DataNodeContainer dataNode) {
return;
}
- final Set<DataSchemaNode> childs = dataNode.getChildNodes();
- if (childs != null) {
- for (DataSchemaNode childNode : childs) {
+ final Set<DataSchemaNode> childNodes = dataNode.getChildNodes();
+ if (childNodes != null) {
+ for (DataSchemaNode childNode : childNodes) {
if (childNode.isAugmenting()) {
continue;
}
final ListSchemaNode list = (ListSchemaNode) childNode;
allLists.add(list);
traverse(list);
- } else if (childNode instanceof LeafSchemaNode) {
- final LeafSchemaNode leaf = (LeafSchemaNode) childNode;
- allLeafs.add(leaf);
- } else if (childNode instanceof LeafListSchemaNode) {
- final LeafListSchemaNode leafList = (LeafListSchemaNode) childNode;
- allLeafLists.add(leafList);
+ } else if (childNode instanceof ChoiceNode) {
+ final ChoiceNode choiceNode = (ChoiceNode) childNode;
+ allChoices.add(choiceNode);
+ final Set<ChoiceCaseNode> cases = choiceNode.getCases();
+ if (cases != null) {
+ for (final ChoiceCaseNode caseNode : cases) {
+ traverse(caseNode);
+ }
+ }
}
}
}
final Set<GroupingDefinition> groupings = dataNode.getGroupings();
- if(groupings != null) {
- for(GroupingDefinition grouping : groupings) {
+ if (groupings != null) {
+ for (GroupingDefinition grouping : groupings) {
traverse(grouping);
}
}
@Override
public boolean hasNext() {
if (container.getChildNodes() != null) {
- Set<DataSchemaNode> childs = container.getChildNodes();
+ final Set<DataSchemaNode> childNodes = container.getChildNodes();
- if ((childs != null) && !childs.isEmpty()) {
- return childs.iterator().hasNext();
+ if ((childNodes != null) && !childNodes.isEmpty()) {
+ return childNodes.iterator().hasNext();
}
}
return false;
});
// toggle button
var toggle;
- if (flow['flow']['installInHw'] == 'true') {
+ if (flow['flow']['installInHw'] == 'true' && flow['flow']['status'] == 'Success') {
toggle = one.lib.dashlet.button.single("Uninstall Flow", one.f.flows.id.dashlet.toggle, "btn-warning", "btn-mini");
} else {
toggle = one.lib.dashlet.button.single("Install Flow", one.f.flows.id.dashlet.toggle, "btn-success", "btn-mini");
var entry = [];
entry.push(value['name']);
entry.push(value['node']);
- if (value['flow']['installInHw'] == 'true')
+ if (value['flow']['installInHw'] == 'true' && value['flow']['status'] == 'Success')
tr['type'] = ['success'];
- else if (value['flow']['installInHw'] == 'false')
+ else if (value['flow']['installInHw'] == 'false' && value['flow']['status'] == 'Success')
+ tr['type'] = ['warning'];
+ else
tr['type'] = ['warning'];
tr['entry'] = entry;
tr['id'] = value['nodeId'];