reused the existing node listeners to propagate child updates.
now the data gets copied from the node listener only
added config manager listener to reconstruct the HA cache upon restart
even if the node is not connected back post restart
fixed NPE in HA PS node creation
copied DB version of child PS node to parent PS node
Change-Id: Ic07bb876d05f02114e920ff270386b7f5324100b
Signed-off-by: K.V Suneelu Verma <k.v.suneelu.verma@ericsson.com>
return null;
}
+ public static String getPsName(Node psNode) {
+ String psNodeId = psNode.getNodeId().getValue();
+ if (psNodeId.contains(PHYSICALSWITCH)) {
+ return psNodeId.substring(psNodeId.indexOf(PHYSICALSWITCH) + PHYSICALSWITCH.length());
+ }
+ return null;
+ }
+
+ public static String getPsName(InstanceIdentifier<Node> psNodeIid) {
+ String psNodeId = psNodeIid.firstKeyOf(Node.class).getNodeId().getValue();
+ if (psNodeId.contains(PHYSICALSWITCH)) {
+ return psNodeId.substring(psNodeId.indexOf(PHYSICALSWITCH) + PHYSICALSWITCH.length());
+ }
+ return null;
+ }
+
public static InstanceIdentifier<Node> getGlobalNodePathFromPSNode(Node psNode) {
- if (psNode == null
- || psNode.getAugmentation(PhysicalSwitchAugmentation.class) == null
- || psNode.getAugmentation(PhysicalSwitchAugmentation.class).getManagedBy() == null) {
- return null;
+ String psNodeId = psNode.getNodeId().getValue();
+ if (psNodeId.contains(PHYSICALSWITCH)) {
+ return convertToInstanceIdentifier(psNodeId.substring(0, psNodeId.indexOf(PHYSICALSWITCH)));
}
- return (InstanceIdentifier<Node>)psNode
- .getAugmentation(PhysicalSwitchAugmentation.class).getManagedBy().getValue();
+ return convertToInstanceIdentifier(psNodeId);
}
public static InstanceIdentifier<Node> convertPsPath(Node psNode, InstanceIdentifier<Node> nodePath) {
String psNodeId = psNode.getNodeId().getValue();
- String psName = psNodeId.substring(psNodeId.indexOf(PHYSICALSWITCH) + PHYSICALSWITCH.length());
- String haPsNodeIdVal = nodePath.firstKeyOf(Node.class).getNodeId().getValue() + PHYSICALSWITCH + psName;
- InstanceIdentifier<Node> haPsPath = convertToInstanceIdentifier(haPsNodeIdVal);
- return haPsPath;
+ if (psNodeId.contains(PHYSICALSWITCH)) {
+ String psName = psNodeId.substring(psNodeId.indexOf(PHYSICALSWITCH) + PHYSICALSWITCH.length());
+ String haPsNodeIdVal = nodePath.firstKeyOf(Node.class).getNodeId().getValue() + PHYSICALSWITCH + psName;
+ InstanceIdentifier<Node> haPsPath = convertToInstanceIdentifier(haPsNodeIdVal);
+ return haPsPath;
+ } else {
+ LOG.error("Failed to find ps path from node {}", psNode);
+ return null;
+ }
}
public static NodeBuilder getNodeBuilderForPath(InstanceIdentifier<Node> haPath) {
}
return true;
}
-
- public static String getPsName(InstanceIdentifier<Node> psNodeIid) {
- String psNodeId = psNodeIid.firstKeyOf(Node.class).getNodeId().getValue();
- if (psNodeId.contains(PHYSICALSWITCH)) {
- return psNodeId.substring(psNodeId.indexOf(PHYSICALSWITCH) + PHYSICALSWITCH.length());
- }
- return null;
- }
}
@Override
public LogicalSwitches withoutUuid(LogicalSwitches data) {
- return new LogicalSwitchesBuilder(data).setHwvtepNodeName(null).build();
+ return new LogicalSwitchesBuilder(data).setLogicalSwitchUuid(null).build();
}
}
package org.opendaylight.netvirt.elan.l2gw.ha.handlers;
import java.util.concurrent.ExecutionException;
+
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
-import org.opendaylight.netvirt.elan.l2gw.ha.HwvtepHAUtil;
import org.opendaylight.netvirt.elan.l2gw.ha.merge.GlobalAugmentationMerger;
import org.opendaylight.netvirt.elan.l2gw.ha.merge.GlobalNodeMerger;
import org.opendaylight.netvirt.elan.l2gw.ha.merge.PSAugmentationMerger;
* @param haUpdated HA node updated
* @param haOriginal HA node original
* @param haChildNodeId HA child node which needs to be updated
+ * @param mod the data object modification
* @param tx Transaction
* @throws ReadFailedException Exception thrown if read fails
* @throws ExecutionException Exception thrown if Execution fail
public void copyHAGlobalUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildNodeId,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException {
-
- Node existingNode = HwvtepHAUtil.readNode(tx, LogicalDatastoreType.CONFIGURATION, haChildNodeId);
- HwvtepGlobalAugmentation updatedGlobal = HwvtepHAUtil.getGlobalAugmentationOfNode(haUpdated);
- HwvtepGlobalAugmentation origGlobal = HwvtepHAUtil.getGlobalAugmentationOfNode(haOriginal);
- HwvtepGlobalAugmentation existingData = HwvtepHAUtil.getGlobalAugmentationOfNode(existingNode);
-
- globalAugmentationMerger.mergeConfigUpdate(existingData, updatedGlobal, origGlobal, haChildNodeId, tx);
- globalNodeMerger.mergeConfigUpdate(existingNode, haUpdated, haOriginal, haChildNodeId, tx);
+ globalAugmentationMerger.mergeConfigUpdate(haChildNodeId,
+ mod.getModifiedAugmentation(HwvtepGlobalAugmentation.class), tx);
+ globalNodeMerger.mergeConfigUpdate(haChildNodeId, mod, tx);
}
/**
* @param haUpdated HA node updated
* @param haOriginal HA node original
* @param haChildNodeId HA child node which needs to be updated
+ * @param mod the data object modification
* @param tx Transaction
* @throws ReadFailedException Exception thrown if read fails
* @throws ExecutionException Exception thrown if Execution fail
public void copyHAPSUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildNodeId,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException {
- Node existingNode = HwvtepHAUtil.readNode(tx, LogicalDatastoreType.CONFIGURATION, haChildNodeId);
-
- PhysicalSwitchAugmentation updated = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(haUpdated);
- PhysicalSwitchAugmentation orig = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(haOriginal);
- PhysicalSwitchAugmentation existingData = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(existingNode);
-
- psAugmentationMerger.mergeConfigUpdate(existingData, updated, orig, haChildNodeId, tx);
- psNodeMerger.mergeConfigUpdate(existingNode, haUpdated, haOriginal, haChildNodeId, tx);
+ psAugmentationMerger.mergeConfigUpdate(haChildNodeId,
+ mod.getModifiedAugmentation(PhysicalSwitchAugmentation.class), tx);
+ psNodeMerger.mergeConfigUpdate(haChildNodeId, mod, tx);
}
}
import javax.inject.Inject;
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
public void copyChildGlobalOpUpdateToHAParent(Node updatedSrcNode,
Node origSrcNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
if (haPath == null) {
return;
}
- opNodeUpdatedHandler.copyChildGlobalOpUpdateToHAParent(updatedSrcNode, origSrcNode, haPath, tx);
+ opNodeUpdatedHandler.copyChildGlobalOpUpdateToHAParent(updatedSrcNode, origSrcNode, haPath, mod, tx);
}
@Override
public void copyChildPsOpUpdateToHAParent(Node updatedSrcPSNode,
Node origSrcPSNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
if (haPath == null) {
return;
}
- opNodeUpdatedHandler.copyChildPsOpUpdateToHAParent(updatedSrcPSNode, origSrcPSNode, haPath, tx);
+ opNodeUpdatedHandler.copyChildPsOpUpdateToHAParent(updatedSrcPSNode, origSrcPSNode, haPath, mod, tx);
}
@Override
public void copyHAPSUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildNodeId,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException {
if (haChildNodeId == null) {
return;
}
- configNodeUpdatedHandler.copyHAPSUpdateToChild(haUpdated, haOriginal, haChildNodeId, tx);
+ configNodeUpdatedHandler.copyHAPSUpdateToChild(haUpdated, haOriginal, haChildNodeId, mod, tx);
}
@Override
public void copyHAGlobalUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildNodeId,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException {
if (haChildNodeId == null) {
return;
}
- configNodeUpdatedHandler.copyHAGlobalUpdateToChild(haUpdated, haOriginal, haChildNodeId, tx);
+ configNodeUpdatedHandler.copyHAGlobalUpdateToChild(haUpdated, haOriginal, haChildNodeId, mod, tx);
}
}
import com.google.common.base.Optional;
import java.util.concurrent.ExecutionException;
+
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
void copyChildGlobalOpUpdateToHAParent(Node updatedSrcNode,
Node origSrcNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException;
void copyChildPsOpUpdateToHAParent(Node updatedSrcPSNode,
Node origSrcPSNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException;
void copyHAPSUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException;
void copyHAGlobalUpdateToChild(Node haUpdated,
Node haOriginal,
InstanceIdentifier<Node> haChildPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException;
}
\ No newline at end of file
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.HwvtepGlobalRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.PhysicalSwitchAugmentation;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.PhysicalSwitchAugmentationBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.hwvtep.global.attributes.Managers;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeBuilder;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
if (OPERATIONAL == logicalDatastoreType) {
haBuilder.setManagers(HwvtepHAUtil.buildManagersForHANode(srcGlobalNodeOptional.get(),
existingDstGlobalNodeOptional));
+ //Also update the manager section in config which helps in cluster reboot scenarios
+ haBuilder.getManagers().stream().forEach((manager) -> {
+ InstanceIdentifier<Managers> managerIid = dstPath.augmentation(HwvtepGlobalAugmentation.class)
+ .child(Managers.class, manager.getKey());
+ tx.put(CONFIGURATION, managerIid, manager, true);
+ });
+
}
+ haBuilder.setDbVersion(srcGlobalAugmentation.getDbVersion());
haNodeBuilder.addAugmentation(HwvtepGlobalAugmentation.class, haBuilder.build());
Node haNode = haNodeBuilder.build();
- tx.merge(logicalDatastoreType, dstPath, haNode, true);
+ if (OPERATIONAL == logicalDatastoreType) {
+ tx.merge(logicalDatastoreType, dstPath, haNode, true);
+ } else {
+ tx.put(logicalDatastoreType, dstPath, haNode, true);
+ }
}
public void copyPSNode(Optional<Node> srcPsNodeOptional,
dstPsNodeBuilder.addAugmentation(PhysicalSwitchAugmentation.class, dstPsAugmentationBuilder.build());
Node dstPsNode = dstPsNodeBuilder.build();
tx.merge(logicalDatastoreType, dstPsPath, dstPsNode, true);
+ LOG.debug("Copied {} physical switch node from {} to {}", logicalDatastoreType, srcPsPath, dstPsPath);
}
public void mergeOpManagedByAttributes(PhysicalSwitchAugmentation psAugmentation,
PhysicalSwitchAugmentationBuilder builder,
InstanceIdentifier<Node> haNodePath) {
builder.setManagedBy(new HwvtepGlobalRef(haNodePath));
- builder.setHwvtepNodeName(psAugmentation.getHwvtepNodeName());
- builder.setHwvtepNodeDescription(psAugmentation.getHwvtepNodeDescription());
- builder.setTunnelIps(psAugmentation.getTunnelIps());
- builder.setPhysicalSwitchUuid(HwvtepHAUtil.getUUid(psAugmentation.getHwvtepNodeName().getValue()));
+ if (psAugmentation != null) {
+ builder.setHwvtepNodeName(psAugmentation.getHwvtepNodeName());
+ builder.setHwvtepNodeDescription(psAugmentation.getHwvtepNodeDescription());
+ builder.setTunnelIps(psAugmentation.getTunnelIps());
+ if (psAugmentation.getHwvtepNodeName() != null) {
+ builder.setPhysicalSwitchUuid(HwvtepHAUtil.getUUid(psAugmentation.getHwvtepNodeName().getValue()));
+ }
+ }
}
}
*/
package org.opendaylight.netvirt.elan.l2gw.ha.handlers;
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.netvirt.elan.l2gw.ha.HwvtepHAUtil;
import org.opendaylight.netvirt.elan.l2gw.ha.merge.GlobalAugmentationMerger;
GlobalNodeMerger globalNodeMerger = GlobalNodeMerger.getInstance();
PSNodeMerger psNodeMerger = PSNodeMerger.getInstance();
- public void handle(Node updatedSrcNode, Node origSrcNode, InstanceIdentifier<Node> haPath, ReadWriteTransaction tx)
- throws ReadFailedException {
- if (updatedSrcNode.getAugmentation(HwvtepGlobalAugmentation.class) != null) {
- copyChildGlobalOpUpdateToHAParent(updatedSrcNode, origSrcNode, haPath, tx);
- } else {
- copyChildPsOpUpdateToHAParent(updatedSrcNode, origSrcNode, haPath, tx);
- }
- }
-
/**
* Copy HA ps node update to HA child ps node of operational data tree.
*
* @param updatedSrcPSNode Updated HA child ps node
* @param origSrcPSNode Original HA ps node
* @param haPath HA node path
+ * @param mod the data object modification
* @param tx Transaction
* @throws ReadFailedException Exception thrown if read fails
*/
public void copyChildPsOpUpdateToHAParent(Node updatedSrcPSNode,
Node origSrcPSNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
InstanceIdentifier<Node> haPSPath = HwvtepHAUtil.convertPsPath(updatedSrcPSNode, haPath);
- Node existingHAPSNode = HwvtepHAUtil.readNode(tx, LogicalDatastoreType.OPERATIONAL, haPSPath);
- PhysicalSwitchAugmentation updatedSrc = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(updatedSrcPSNode);
- PhysicalSwitchAugmentation origSrc = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(origSrcPSNode);
- PhysicalSwitchAugmentation existingData = HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(existingHAPSNode);
-
- psAugmentationMerger.mergeOpUpdate(existingData, updatedSrc, origSrc, haPSPath, tx);
- psNodeMerger.mergeOpUpdate(existingHAPSNode, updatedSrcPSNode, origSrcPSNode, haPSPath, tx);
+ psAugmentationMerger.mergeOpUpdate(haPSPath,
+ mod.getModifiedAugmentation(PhysicalSwitchAugmentation.class), tx);
+ psNodeMerger.mergeOpUpdate(haPSPath, mod, tx);
}
/**
* @param updatedSrcNode Updated HA child node
* @param origSrcNode Original HA node
* @param haPath HA node path
+ * @param mod the data object modification
* @param tx Transaction
* @throws ReadFailedException Exception thrown if read fails
*/
public void copyChildGlobalOpUpdateToHAParent(Node updatedSrcNode,
Node origSrcNode,
InstanceIdentifier<Node> haPath,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
- Node existingDstNode = HwvtepHAUtil.readNode(tx, LogicalDatastoreType.OPERATIONAL, haPath);
- if (existingDstNode == null) {
- //No dst present nothing to copy
- return;
- }
- HwvtepGlobalAugmentation existingData = HwvtepHAUtil.getGlobalAugmentationOfNode(existingDstNode);
- HwvtepGlobalAugmentation updatedSrc = HwvtepHAUtil.getGlobalAugmentationOfNode(updatedSrcNode);
- HwvtepGlobalAugmentation origSrc = HwvtepHAUtil.getGlobalAugmentationOfNode(origSrcNode);
-
- globalAugmentationMerger.mergeOpUpdate(existingData, updatedSrc, origSrc, haPath, tx);
- globalNodeMerger.mergeOpUpdate(existingDstNode, updatedSrcNode, origSrcNode, haPath, tx);
+ globalAugmentationMerger.mergeOpUpdate(haPath,
+ mod.getModifiedAugmentation(HwvtepGlobalAugmentation.class), tx);
+ globalNodeMerger.mergeOpUpdate(haPath, mod, tx);
}
}
import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION;
+import com.google.common.base.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.netvirt.elan.l2gw.ha.HwvtepHAUtil;
import org.opendaylight.netvirt.elan.l2gw.ha.handlers.HAEventHandler;
import org.opendaylight.netvirt.elan.l2gw.ha.handlers.IHAEventHandler;
+import org.opendaylight.netvirt.elan.l2gw.ha.handlers.NodeCopier;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
@Singleton
public class HAConfigNodeListener extends HwvtepNodeBaseListener {
private final IHAEventHandler haEventHandler;
+ private final NodeCopier nodeCopier;
@Inject
- public HAConfigNodeListener(DataBroker db, HAEventHandler haEventHandler) throws Exception {
+ public HAConfigNodeListener(DataBroker db, HAEventHandler haEventHandler,
+ NodeCopier nodeCopier) throws Exception {
super(LogicalDatastoreType.CONFIGURATION, db);
this.haEventHandler = haEventHandler;
+ this.nodeCopier = nodeCopier;
}
@Override
- void onPsNodeAdd(InstanceIdentifier<Node> key,
+ void onPsNodeAdd(InstanceIdentifier<Node> haPsPath,
Node haPSNode,
ReadWriteTransaction tx) throws InterruptedException, ExecutionException, ReadFailedException {
//copy the ps node data to children
String psId = haPSNode.getNodeId().getValue();
Set<InstanceIdentifier<Node>> childSwitchIds = HwvtepHAUtil.getPSChildrenIdsForHAPSNode(psId);
- for (InstanceIdentifier<Node> childSwitchId : childSwitchIds) {
- haEventHandler.copyHAPSUpdateToChild(haPSNode, null/*haOriginal*/, childSwitchId, tx);
+ if (childSwitchIds.isEmpty()) {
+ LOG.error("Failed to find any ha children {}", haPsPath);
+ return;
+ }
+ for (InstanceIdentifier<Node> childPsPath : childSwitchIds) {
+ String nodeId =
+ HwvtepHAUtil.convertToGlobalNodeId(childPsPath.firstKeyOf(Node.class).getNodeId().getValue());
+ InstanceIdentifier<Node> childGlobalPath = HwvtepHAUtil.convertToInstanceIdentifier(nodeId);
+ nodeCopier.copyPSNode(Optional.fromNullable(haPSNode), haPsPath, childPsPath, childGlobalPath,
+ LogicalDatastoreType.CONFIGURATION, tx);
}
LOG.trace("Handle config ps node add {}", psId);
}
void onPsNodeUpdate(InstanceIdentifier<Node> key,
Node haPSUpdated,
Node haPSOriginal,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws InterruptedException, ExecutionException, ReadFailedException {
//copy the ps node data to children
String psId = haPSUpdated.getNodeId().getValue();
Set<InstanceIdentifier<Node>> childSwitchIds = HwvtepHAUtil.getPSChildrenIdsForHAPSNode(psId);
for (InstanceIdentifier<Node> childSwitchId : childSwitchIds) {
- haEventHandler.copyHAPSUpdateToChild(haPSUpdated, haPSOriginal, childSwitchId, tx);
+ haEventHandler.copyHAPSUpdateToChild(haPSUpdated, haPSOriginal, childSwitchId, mod, tx);
}
}
void onGlobalNodeUpdate(InstanceIdentifier<Node> key,
Node haUpdated,
Node haOriginal,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx)
throws InterruptedException, ExecutionException, ReadFailedException {
- //copy the ha node data to children taken care of the HAListeners
- /*
Set<InstanceIdentifier<Node>> childNodeIds = hwvtepHACache.getChildrenForHANode(key);
for (InstanceIdentifier<Node> haChildNodeId : childNodeIds) {
- haEventHandler.copyHAGlobalUpdateToChild(haUpdated, haOriginal, haChildNodeId, tx);
+ haEventHandler.copyHAGlobalUpdateToChild(haUpdated, haOriginal, haChildNodeId, mod, tx);
}
- */
}
@Override
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.ClusteredDataTreeChangeListener;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
void onGlobalNodeUpdate(InstanceIdentifier<Node> childPath,
Node updatedChildNode,
Node beforeChildNode,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) {
boolean wasHAChild = hwvtepHACache.isHAEnabledDevice(childPath);
addToHACacheIfBecameHAChild(childPath, updatedChildNode, beforeChildNode, tx);
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
void onGlobalNodeUpdate(InstanceIdentifier<Node> childGlobalPath,
Node updatedChildNode,
Node originalChildNode,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
String oldHAId = HwvtepHAUtil.getHAIdFromManagerOtherConfig(originalChildNode);
if (!Strings.isNullOrEmpty(oldHAId)) { //was already ha child
InstanceIdentifier<Node> haPath = hwvtepHACache.getParent(childGlobalPath);
LOG.debug("Copy oper update from child {} to parent {}", childGlobalPath, haPath);
- haEventHandler.copyChildGlobalOpUpdateToHAParent(updatedChildNode, originalChildNode, haPath, tx);
+ haEventHandler.copyChildGlobalOpUpdateToHAParent(updatedChildNode, originalChildNode, haPath, mod, tx);
return;//TODO handle unha case
}
void onPsNodeUpdate(InstanceIdentifier<Node> childPsPath,
Node updatedChildPSNode,
Node originalChildPSNode,
+ DataObjectModification<Node> mod,
ReadWriteTransaction tx) throws ReadFailedException {
InstanceIdentifier<Node> childGlobalPath = HwvtepHAUtil.getGlobalNodePathFromPSNode(updatedChildPSNode);
if (IS_NOT_HA_CHILD.test(childGlobalPath)) {
return;
}
InstanceIdentifier<Node> haGlobalPath = hwvtepHACache.getParent(childGlobalPath);
- InstanceIdentifier<Node> haPsPath = HwvtepHAUtil.convertPsPath(updatedChildPSNode, haGlobalPath);
- LOG.error("Copy oper ps update from child {} to parent {}", childPsPath, haPsPath);
- haEventHandler.copyChildPsOpUpdateToHAParent(updatedChildPSNode, originalChildPSNode, haPsPath, tx);
+ haEventHandler.copyChildPsOpUpdateToHAParent(updatedChildPSNode, originalChildPSNode, haGlobalPath, mod, tx);
}
@Override
Node childNode,
InstanceIdentifier<Node> haNodePath,
ReadWriteTransaction tx) {
- LOG.error("Inside readAndCopyChildPsOpToParent");
String childGlobalNodeId = childNode.getNodeId().getValue();
List<InstanceIdentifier> childPsIids = new ArrayList<>();
HwvtepGlobalAugmentation hwvtepGlobalAugmentation = childNode.getAugmentation(HwvtepGlobalAugmentation.class);
if (childPsIids.isEmpty()) {
LOG.info("No child ps found for global {}", childGlobalNodeId);
}
- LOG.error("Got child PS node of size {}", childPsIids.size());
childPsIids.forEach((psIid) -> {
try {
InstanceIdentifier<Node> childPsIid = psIid;
Node original = HwvtepHAUtil.getOriginal(mod);
if (updated != null && original != null) {
if (!nodeId.contains(HwvtepHAUtil.PHYSICALSWITCH)) {
- onGlobalNodeUpdate(key, updated, original, tx);
+ onGlobalNodeUpdate(key, updated, original, mod, tx);
} else {
- onPsNodeUpdate(key, updated, original, tx);
+ onPsNodeUpdate(key, updated, original, mod, tx);
}
}
}
}
- void onGlobalNodeUpdate(InstanceIdentifier<Node> key, Node updated, Node original, ReadWriteTransaction tx)
+ void onGlobalNodeUpdate(InstanceIdentifier<Node> key, Node updated, Node original,
+ DataObjectModification<Node> mod, ReadWriteTransaction tx)
throws ReadFailedException, InterruptedException, ExecutionException {
}
- void onPsNodeUpdate(InstanceIdentifier<Node> key, Node updated, Node original, ReadWriteTransaction tx)
+ void onPsNodeUpdate(InstanceIdentifier<Node> key, Node updated, Node original,
+ DataObjectModification<Node> mod, ReadWriteTransaction tx)
throws ReadFailedException, InterruptedException, ExecutionException {
}
this.mergeCommand = mergeCommand;
this.datastoreType = datastoreType;
this.dataTypeName = getClassTypeName();
- registerListener(this.datastoreType.getDatastoreType() , broker);
+ //registerListener(this.datastoreType.getDatastoreType() , broker);
}
@Override
--- /dev/null
+/*
+ * Copyright (c) 2018 Ericsson India Global Services Pvt Ltd. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.netvirt.elan.l2gw.ha.listeners;
+
+import java.util.Arrays;
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.genius.datastoreutils.AsyncClusteredDataTreeChangeListenerBase;
+import org.opendaylight.genius.utils.hwvtep.HwvtepHACache;
+import org.opendaylight.genius.utils.hwvtep.HwvtepSouthboundUtils;
+import org.opendaylight.netvirt.elan.l2gw.ha.HwvtepHAUtil;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.HwvtepGlobalAugmentation;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.hwvtep.rev150901.hwvtep.global.attributes.Managers;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Singleton
+public final class ManagerListener extends AsyncClusteredDataTreeChangeListenerBase<Managers, ManagerListener> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(HAOpClusteredListener.class);
+
+ private final DataBroker dataBroker;
+
+ @Inject
+ public ManagerListener(DataBroker dataBroker) {
+ this.dataBroker = dataBroker;
+ }
+
+ @PostConstruct
+ public void init() {
+ registerListener(LogicalDatastoreType.CONFIGURATION, dataBroker);
+ }
+
+ @Override
+ protected InstanceIdentifier<Managers> getWildCardPath() {
+ return HwvtepSouthboundUtils.createHwvtepTopologyInstanceIdentifier()
+ .child(Node.class)
+ .augmentation(HwvtepGlobalAugmentation.class)
+ .child(Managers.class);
+ }
+
+ @Override
+ protected void remove(InstanceIdentifier<Managers> key, Managers managers) {
+ }
+
+ @Override
+ protected void update(InstanceIdentifier<Managers> key, Managers before, Managers after) {
+ }
+
+ @Override
+ protected void add(InstanceIdentifier<Managers> key, Managers managers) {
+ InstanceIdentifier<Node> parent = key.firstIdentifierOf(Node.class);
+ if (managers.getKey().getTarget().getValue().contains(HwvtepHAUtil.MANAGER_KEY)
+ && managers.getManagerOtherConfigs() != null) {
+ managers.getManagerOtherConfigs().stream()
+ .filter(otherConfig -> otherConfig.getKey().getOtherConfigKey().contains(HwvtepHAUtil.HA_CHILDREN))
+ .flatMap(otherConfig -> Arrays.asList(otherConfig.getOtherConfigValue().split(",")).stream())
+ .map(HwvtepHAUtil::convertToInstanceIdentifier)
+ .forEach(childIid -> HwvtepHACache.getInstance().addChild(parent, childIid));
+ }
+ }
+
+ @Override
+ protected ManagerListener getDataTreeChangeListener() {
+ return this;
+ }
+}
extends MergeCommandsAggregator {
private GlobalAugmentationMerger() {
- commands.add(new RemoteMcastCmd());
- commands.add(new RemoteUcastCmd());
- commands.add(new LocalUcastCmd());
- commands.add(new LocalMcastCmd());
- commands.add(new LogicalSwitchesCmd());
- commands.add(new SwitchesCmd());
+ addCommand(new RemoteMcastCmd());
+ addCommand(new RemoteUcastCmd());
+ addCommand(new LocalUcastCmd());
+ addCommand(new LocalMcastCmd());
+ addCommand(new LogicalSwitchesCmd());
+ addCommand(new SwitchesCmd());
}
static GlobalAugmentationMerger instance = new GlobalAugmentationMerger();
public class GlobalNodeMerger extends MergeCommandsAggregator {
public GlobalNodeMerger() {
- commands.add(new PhysicalLocatorCmd());
+ addCommand(new PhysicalLocatorCmd());
}
static GlobalNodeMerger instance = new GlobalNodeMerger();
*/
package org.opendaylight.netvirt.elan.l2gw.ha.merge;
-import java.util.ArrayList;
-import java.util.List;
+import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION;
+import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.OPERATIONAL;
+
+import com.google.common.base.Optional;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiPredicate;
+
+import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.genius.utils.SuperTypeUtil;
+import org.opendaylight.netvirt.elan.l2gw.ha.commands.LocalUcastCmd;
import org.opendaylight.netvirt.elan.l2gw.ha.commands.MergeCommand;
+import org.opendaylight.netvirt.elan.l2gw.ha.commands.RemoteUcastCmd;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yangtools.concepts.Builder;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public abstract class MergeCommandsAggregator<BuilderTypeT extends Builder, AugTypeT extends DataObject> {
+ public static final Logger LOG = LoggerFactory.getLogger(MergeCommandsAggregator.class);
+
+ // Merge commands keyed by the child data type they copy (e.g. TerminationPoint -> TerminationPointCmd).
+ protected Map<Class<?>, MergeCommand> commands = new HashMap<>();
+
+ // Local ucast macs are learnt (operational-only): never copy them to CONFIGURATION.
+ // Remote ucast macs are programmed (config-only): never copy them to OPERATIONAL.
+ // null command lookup is safe here: (null instanceof X) is false, so unknown types are not skipped.
+ private final BiPredicate<LogicalDatastoreType, Class> skipCopy = (dsType, cmdType) -> {
+ return (dsType == CONFIGURATION ? commands.get(cmdType) instanceof LocalUcastCmd :
+ commands.get(cmdType) instanceof RemoteUcastCmd);
+ };
+
protected MergeCommandsAggregator() {
}
- List<MergeCommand> commands = new ArrayList<>();
+ // Registers a merge command under the data type it handles (its first type parameter).
+ protected void addCommand(MergeCommand mergeCommand) {
+ commands.put(SuperTypeUtil.getTypeParameter(mergeCommand.getClass(), 0), mergeCommand);
+ }
public void mergeOperationalData(BuilderTypeT builder,
AugTypeT existingData,
AugTypeT src,
InstanceIdentifier<Node> dstPath) {
- for (MergeCommand cmd : commands) {
- cmd.mergeOperationalData(builder, existingData, src, dstPath);
+ for (MergeCommand cmd : commands.values()) {
+ if (skipCopy.negate().test(OPERATIONAL, cmd.getClass())) {
+ cmd.mergeOperationalData(builder, existingData, src, dstPath);
+ }
}
}
public void mergeConfigData(BuilderTypeT builder,
AugTypeT src,
InstanceIdentifier<Node> dstPath) {
- for (MergeCommand cmd : commands) {
- cmd.mergeConfigData(builder, src, dstPath);
+ for (MergeCommand cmd : commands.values()) {
+ if (skipCopy.negate().test(CONFIGURATION, cmd.getClass())) {
+ cmd.mergeConfigData(builder, src, dstPath);
+ }
}
}
- public void mergeConfigUpdate(AugTypeT existingData,
- AugTypeT updated,
- AugTypeT orig,
- InstanceIdentifier<Node> dstPath,
+ public void mergeConfigUpdate(InstanceIdentifier<Node> dstPath,
+ DataObjectModification mod,
ReadWriteTransaction tx) {
- for (MergeCommand cmd : commands) {
- cmd.mergeConfigUpdate(existingData, updated, orig, dstPath, tx);
- }
+ mergeUpdate(dstPath, mod, CONFIGURATION, tx);
}
- public void mergeOpUpdate(AugTypeT existingData,
- AugTypeT updatedSrc,
- AugTypeT origSrc,
- InstanceIdentifier<Node> dstPath,
+ public void mergeOpUpdate(InstanceIdentifier<Node> dstPath,
+ DataObjectModification mod,
ReadWriteTransaction tx) {
- for (MergeCommand cmd : commands) {
- cmd.mergeOpUpdate(existingData, updatedSrc, origSrc, dstPath, tx);
+ mergeUpdate(dstPath, mod, OPERATIONAL, tx);
+ }
+
+ // Propagates the modified children of a node update to dstPath (parent for OPERATIONAL,
+ // child for CONFIGURATION), transforming each child's path/keys for the destination node.
+ public void mergeUpdate(InstanceIdentifier<Node> dstPath,
+ DataObjectModification mod,
+ LogicalDatastoreType datastoreType,
+ ReadWriteTransaction tx) {
+ if (mod == null) {
+ return;
}
+ Collection<DataObjectModification> modifications = mod.getModifiedChildren();
+ modifications.stream()
+ .filter(modification -> skipCopy.negate().test(datastoreType, modification.getDataType()))
+ .filter(modification -> commands.get(modification.getDataType()) != null)
+ .peek(modification -> LOG.debug("Received {} modification {} copy/delete to {}",
+ datastoreType, modification, dstPath))
+ .forEach(modification -> {
+ MergeCommand mergeCommand = commands.get(modification.getDataType());
+ // dataAfter present => created/updated; absent => deleted (use dataBefore to locate it).
+ boolean create = modification.getDataAfter() != null;
+ DataObject data = create ? modification.getDataAfter() : modification.getDataBefore();
+ InstanceIdentifier<DataObject> transformedId = mergeCommand.generateId(dstPath, data);
+ DataObject transformedItem = mergeCommand.transform(dstPath, data);
+
+ Optional<DataObject> existingDataOptional = null;
+ try {
+ existingDataOptional = tx.read(datastoreType, transformedId).checkedGet();
+ } catch (ReadFailedException ex) {
+ LOG.error("Failed to read data {} from {}", transformedId, datastoreType, ex);
+ return;
+ }
+
+ String destination = datastoreType == CONFIGURATION ? "child" : "parent";
+ if (create) {
+ // Compare and write the transformed item: the destination holds data keyed for
+ // dstPath, so the raw source object would carry the wrong node id/keys.
+ if (isDataUpdated(existingDataOptional, transformedItem)) {
+ LOG.debug("Copy to {} {} {}", destination, datastoreType, transformedId);
+ tx.put(datastoreType, transformedId, transformedItem, true);
+ } else {
+ LOG.debug("Data not updated skip copy to {}", transformedId);
+ }
+ } else {
+ if (existingDataOptional.isPresent()) {
+ LOG.debug("Delete from {} {} {}", destination, datastoreType, transformedId);
+ tx.delete(datastoreType, transformedId);
+ } else {
+ LOG.debug("Delete skipped for {}", transformedId);
+ }
+ }
+ });
+ }
+
+ // True when the destination has no data yet or its data differs from newData.
+ boolean isDataUpdated(Optional<DataObject> existingDataOptional, DataObject newData) {
+ return !existingDataOptional.isPresent() || !Objects.equals(existingDataOptional.get(), newData);
}
}
public class PSAugmentationMerger
extends MergeCommandsAggregator {
public PSAugmentationMerger() {
- commands.add(new TunnelCmd());
- commands.add(new TunnelIpCmd());
+ addCommand(new TunnelCmd());
+ addCommand(new TunnelIpCmd());
}
static PSAugmentationMerger instance = new PSAugmentationMerger();
public class PSNodeMerger extends MergeCommandsAggregator {
public PSNodeMerger() {
- commands.add(new TerminationPointCmd());
+ addCommand(new TerminationPointCmd());
}
static PSNodeMerger instance = new PSNodeMerger();