import org.opendaylight.genius.interfacemanager.recovery.impl.InterfaceServiceRecoveryHandler;
import org.opendaylight.genius.interfacemanager.renderer.hwvtep.statehelpers.HwVTEPInterfaceStateRemoveHelper;
import org.opendaylight.genius.interfacemanager.renderer.hwvtep.statehelpers.HwVTEPInterfaceStateUpdateHelper;
-import org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache;
import org.opendaylight.infrautils.jobcoordinator.JobCoordinator;
import org.opendaylight.mdsal.binding.api.DataBroker;
import org.opendaylight.mdsal.binding.util.ManagedNewTransactionRunner;
public HwVTEPTunnelsStateListener(@Reference DataBroker dataBroker,
@Reference JobCoordinator coordinator,
InterfaceServiceRecoveryHandler interfaceServiceRecoveryHandler,
- @Reference ServiceRecoveryRegistry serviceRecoveryRegistry,
- @Reference HwvtepNodeHACache hwvtepNodeHACache) {
+ @Reference ServiceRecoveryRegistry serviceRecoveryRegistry) {
super(dataBroker, LogicalDatastoreType.OPERATIONAL,
InstanceIdentifier.builder(NetworkTopology.class).child(Topology.class).child(Node.class)
.augmentation(PhysicalSwitchAugmentation.class).child(Tunnels.class).build());
import org.opendaylight.genius.mdsalutil.interfaces.testutils.TestIMdsalApiManager;
import org.opendaylight.genius.mdsalutil.testutils.TestShardStatusMonitor;
import org.opendaylight.genius.utils.clustering.EntityOwnershipUtils;
-import org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache;
+import org.opendaylight.genius.utils.hwvtep.HwvtepHACache;
import org.opendaylight.infrautils.caches.CacheProvider;
import org.opendaylight.infrautils.inject.guice.testutils.AbstractGuiceJsr250Module;
import org.opendaylight.mdsal.binding.api.DataBroker;
bind(TerminationPointStateListener.class);
bind(VlanMemberConfigListener.class);
bind(InterfaceStateListener.class);
- bind(HwvtepNodeHACache.class).toInstance(mock(HwvtepNodeHACache.class));
+ bind(HwvtepHACache.class).toInstance(mock(HwvtepHACache.class));
bind(IfmConfig.class).toInstance(mock(IfmConfig.class));
bind(CacheProvider.class).toInstance(mock(CacheProvider.class));
bind(IfmDiagStatusProvider.class).toInstance(mock(IfmDiagStatusProvider.class));
package org.opendaylight.genius.datastoreutils.hwvtep;
import java.util.concurrent.ExecutorService;
-import org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache;
+import org.opendaylight.genius.utils.hwvtep.HwvtepHACache;
import org.opendaylight.mdsal.binding.api.DataBroker;
import org.opendaylight.mdsal.binding.api.DataTreeChangeListener;
import org.opendaylight.mdsal.binding.api.DataTreeIdentifier;
public abstract class HwvtepAbstractDataTreeChangeListener<T extends DataObject,K extends DataTreeChangeListener<T>>
extends AbstractAsyncDataTreeChangeListener<T> {
- private final HwvtepNodeHACache hwvtepNodeHACache;
-
public HwvtepAbstractDataTreeChangeListener(DataBroker dataBroker,DataTreeIdentifier dataTreeIdentifier,
- ExecutorService executorService, HwvtepNodeHACache hwvtepNodeHACache) {
+ ExecutorService executorService) {
super(dataBroker, dataTreeIdentifier, executorService);
- this.hwvtepNodeHACache = hwvtepNodeHACache;
}
@Override
public void remove(InstanceIdentifier<T> identifier, T del) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (HwvtepHACache.getInstance().isHAEnabledDevice(identifier)) {
return;
}
removed(identifier, del);
@Override
public void update(InstanceIdentifier<T> identifier, T original, T update) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (HwvtepHACache.getInstance().isHAEnabledDevice(identifier)) {
return;
}
updated(identifier,original,update);
@Override
public void add(InstanceIdentifier<T> identifier, T add) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (HwvtepHACache.getInstance().isHAEnabledDevice(identifier)) {
return;
}
added(identifier,add);
package org.opendaylight.genius.datastoreutils.hwvtep;
import java.util.concurrent.ExecutorService;
-import org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache;
+import org.opendaylight.genius.utils.hwvtep.HwvtepHACache;
import org.opendaylight.mdsal.binding.api.ClusteredDataTreeChangeListener;
import org.opendaylight.mdsal.binding.api.DataBroker;
import org.opendaylight.mdsal.binding.api.DataTreeIdentifier;
T extends DataObject, K extends ClusteredDataTreeChangeListener<T>>
extends AbstractClusteredAsyncDataTreeChangeListener<T> {
- private final HwvtepNodeHACache hwvtepNodeHACache;
+ private final HwvtepHACache hwvtepHACache;
public HwvtepClusteredDataTreeChangeListener(DataBroker dataBroker, DataTreeIdentifier dataTreeIdentifier,
- ExecutorService executorService, HwvtepNodeHACache hwvtepNodeHACache) {
+ ExecutorService executorService, HwvtepHACache hwvtepHACache) {
super(dataBroker, dataTreeIdentifier, executorService);
- this.hwvtepNodeHACache = hwvtepNodeHACache;
+ this.hwvtepHACache = hwvtepHACache;
}
@Override
public void remove(InstanceIdentifier<T> identifier, T del) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (hwvtepHACache.isHAEnabledDevice(identifier)) {
return;
}
removed(identifier,del);
@Override
public void update(InstanceIdentifier<T> identifier, T original, T update) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (hwvtepHACache.isHAEnabledDevice(identifier)) {
return;
}
updated(identifier,original, update);
@Override
public void add(InstanceIdentifier<T> identifier, T add) {
- if (hwvtepNodeHACache.isHAEnabledDevice(identifier)) {
+ if (hwvtepHACache.isHAEnabledDevice(identifier)) {
return;
}
added(identifier, add);
--- /dev/null
+/*
+ * Copyright (c) 2016 Ericsson India Global Services Pvt Ltd. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.genius.utils.hwvtep;
+
+/**
+ * Base class for debug/trace events recorded by {@code HwvtepHACache}.
+ * Captures the wall-clock creation time; subclasses (see {@code NodeEvent})
+ * carry the event-specific payload.
+ */
+public abstract class DebugEvent {
+
+    // Wall-clock time at construction; immutable thereafter.
+    private final long eventTimeStamp;
+
+    public DebugEvent() {
+        this.eventTimeStamp = System.currentTimeMillis();
+    }
+
+    /** Returns the epoch-millis timestamp at which this event was created. */
+    public long getEventTimeStamp() {
+        return eventTimeStamp;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2020 Ericsson India Global Services Pvt Ltd. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.genius.utils.hwvtep;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.LinkedBlockingQueue;
+import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev130715.IpAddress;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Caches hwvtep node HA information: parent-to-child topology relationships,
+ * per-node TEP IPs, node connection status, and a bounded trail of debug
+ * events. Intended to be used as a process-wide singleton via
+ * {@link #getInstance()}.
+ */
+public class HwvtepHACache {
+
+    private static final Logger LOG = LoggerFactory.getLogger(HwvtepHACache.class);
+
+    private static final int MAX_EVENT_BUFFER_SIZE = 500000;
+    private static final int EVENT_DRAIN_BUFFER_SIZE = 100000;
+
+    private static final HwvtepHACache INSTANCE = new HwvtepHACache();
+
+    // Child sets are concurrent key-sets so that getChildrenForHANode() can
+    // copy them safely while addChild() mutates them (a plain HashSet here
+    // could throw ConcurrentModificationException during the copy).
+    private final ConcurrentHashMap<InstanceIdentifier<Node>, Set<InstanceIdentifier<Node>>>
+            parentToChildMap = new ConcurrentHashMap<>();
+
+    private final ConcurrentHashMap<InstanceIdentifier<Node>, InstanceIdentifier<Node>>
+            childToParentMap = new ConcurrentHashMap<>();
+
+    // Global node-id strings of all known HA children, for lookup by id.
+    private final ConcurrentHashMap<String, Boolean> childNodeIds = new ConcurrentHashMap<>();
+
+    // node-id -> connected(true)/disconnected(false)
+    private final ConcurrentHashMap<String, Boolean> connectedNodes = new ConcurrentHashMap<>();
+
+    // Bounded event trail; oldest events are drained in bulk when full.
+    private final LinkedBlockingQueue<DebugEvent> debugEvents = new LinkedBlockingQueue<>(MAX_EVENT_BUFFER_SIZE);
+
+    // Parameterized and final (the original used raw ConcurrentHashMap).
+    private final Map<InstanceIdentifier<Node>, IpAddress> tepIps = new ConcurrentHashMap<>();
+    private final Map<IpAddress, InstanceIdentifier<Node>> nodeIds = new ConcurrentHashMap<>();
+
+    // Public for compatibility with existing direct instantiations; prefer
+    // getInstance() for the shared cache.
+    public HwvtepHACache() {
+    }
+
+    /** Returns the process-wide shared cache instance. */
+    public static HwvtepHACache getInstance() {
+        return INSTANCE;
+    }
+
+    /**
+     * Records {@code child} as an HA child of {@code parent} and emits a
+     * ChildAddedEvent. No-op when either argument is null.
+     */
+    public synchronized void addChild(InstanceIdentifier<Node> parent, InstanceIdentifier<Node> child) {
+        if (parent == null || child == null) {
+            return;
+        }
+
+        parentToChildMap.computeIfAbsent(parent, key -> ConcurrentHashMap.newKeySet()).add(child);
+        childToParentMap.put(child, parent);
+        String childNodeId = child.firstKeyOf(Node.class).getNodeId().getValue();
+        childNodeIds.put(childNodeId, Boolean.TRUE);
+        addDebugEvent(new NodeEvent.ChildAddedEvent(childNodeId));
+    }
+
+    /** Returns the TEP IP recorded for the node, or null if none is known. */
+    public IpAddress getTepIpOfNode(InstanceIdentifier<Node> iid) {
+        return tepIps.get(iid);
+    }
+
+    /** Returns the node recorded for the TEP IP, or null if none is known. */
+    public InstanceIdentifier<Node> getNodeIdFromTepIp(IpAddress ipAddress) {
+        return nodeIds.get(ipAddress);
+    }
+
+    /**
+     * Records the TEP IP of a node. An HA parent overwrites any existing
+     * ip-to-node mapping; a non-parent only claims the IP if unclaimed.
+     */
+    public void setTepIpOfNode(InstanceIdentifier<Node> iid, IpAddress ipAddress) {
+        tepIps.put(iid, ipAddress);
+        if (isHAParentNode(iid)) {
+            nodeIds.put(ipAddress, iid);
+        } else {
+            nodeIds.putIfAbsent(ipAddress, iid);
+        }
+    }
+
+    /**
+     * Returns true when the identified device participates in HA: either its
+     * Node is a known HA child, or it is a physical-switch node whose global
+     * node id (the prefix before the PSWITCH uri segment) is a known child.
+     */
+    public boolean isHAEnabledDevice(InstanceIdentifier<?> iid) {
+        if (iid == null) {
+            return false;
+        }
+        boolean enabled = childToParentMap.containsKey(iid.firstIdentifierOf(Node.class));
+        if (!enabled) {
+            String psNodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
+            int idx = psNodeId.indexOf(HwvtepSouthboundConstants.PSWITCH_URI_PREFIX);
+            if (idx > 0) {
+                // idx - 1 drops the separator character preceding the prefix.
+                String globalNodeId = psNodeId.substring(0, idx - 1);
+                return childNodeIds.containsKey(globalNodeId);
+            }
+        }
+        return enabled;
+    }
+
+    public boolean isHAParentNode(InstanceIdentifier<Node> node) {
+        return parentToChildMap.containsKey(node);
+    }
+
+    /**
+     * Returns a snapshot of the children of the given HA parent; never null.
+     * (Single map read — avoids the containsKey/get race of a two-step check.)
+     */
+    public Set<InstanceIdentifier<Node>> getChildrenForHANode(InstanceIdentifier<Node> parent) {
+        Set<InstanceIdentifier<Node>> children = parent != null ? parentToChildMap.get(parent) : null;
+        return children != null ? new HashSet<>(children) : Collections.emptySet();
+    }
+
+    public Set<InstanceIdentifier<Node>> getHAParentNodes() {
+        return parentToChildMap.keySet();
+    }
+
+    public Set<InstanceIdentifier<Node>> getHAChildNodes() {
+        return childToParentMap.keySet();
+    }
+
+    /** Returns the HA parent of the given child, or null if not an HA child. */
+    public InstanceIdentifier<Node> getParent(InstanceIdentifier<Node> child) {
+        return child != null ? childToParentMap.get(child) : null;
+    }
+
+    /** Removes the parent and all of its child bookkeeping from the cache. */
+    public synchronized void cleanupParent(InstanceIdentifier<Node> parent) {
+        if (parent == null) {
+            return;
+        }
+
+        Set<InstanceIdentifier<Node>> children = parentToChildMap.remove(parent);
+        if (children != null) {
+            for (InstanceIdentifier<Node> child : children) {
+                childToParentMap.remove(child);
+                childNodeIds.remove(child.firstKeyOf(Node.class).getNodeId().getValue());
+            }
+        }
+    }
+
+    public void updateConnectedNodeStatus(InstanceIdentifier<Node> iid) {
+        String nodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
+        connectedNodes.put(nodeId, true);
+        addDebugEvent(new NodeEvent.NodeConnectedEvent(nodeId));
+    }
+
+    public void updateDisconnectedNodeStatus(InstanceIdentifier<Node> iid) {
+        String nodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
+        connectedNodes.put(nodeId, false);
+        addDebugEvent(new NodeEvent.NodeDisconnectedEvent(nodeId));
+    }
+
+    /** Returns an immutable snapshot of node-id to connection status. */
+    public Map<String, Boolean> getConnectedNodes() {
+        return ImmutableMap.copyOf(connectedNodes);
+    }
+
+    /**
+     * Appends a debug event; when the buffer is full, drains a batch of the
+     * oldest events (rather than clearing everything) to make room.
+     */
+    public void addDebugEvent(DebugEvent debugEvent) {
+        if (!debugEvents.offer(debugEvent)) {
+            Collection<DebugEvent> discarded = new ArrayList<>();
+            debugEvents.drainTo(discarded, EVENT_DRAIN_BUFFER_SIZE);
+
+            if (!debugEvents.offer(debugEvent)) {
+                LOG.debug("Unable to add debug event");
+            }
+        }
+    }
+
+    /** Returns an immutable snapshot of the buffered debug events. */
+    public List<DebugEvent> getNodeEvents() {
+        return ImmutableList.copyOf(debugEvents);
+    }
+}
+++ /dev/null
-/*
- * Copyright (c) 2017 Inocybe Technologies and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.genius.utils.hwvtep;
-
-import java.util.Map;
-import java.util.Set;
-import org.eclipse.jdt.annotation.NonNull;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-/**
- * Caches hwvtep Node HA info.
- *
- * @author Thomas Pantelis
- */
-public interface HwvtepNodeHACache {
- void addChild(InstanceIdentifier<Node> parentId, InstanceIdentifier<Node> childId);
-
- boolean isHAEnabledDevice(InstanceIdentifier<?> nodeId);
-
- boolean isHAParentNode(InstanceIdentifier<Node> nodeId);
-
- // Commented out for now - causes findbugs violation in netvirt
- //@NonNull
- Set<InstanceIdentifier<Node>> getChildrenForHANode(InstanceIdentifier<Node> parentId);
-
- @NonNull
- Set<InstanceIdentifier<Node>> getHAParentNodes();
-
- @NonNull
- Set<InstanceIdentifier<Node>> getHAChildNodes();
-
- InstanceIdentifier<Node> getParent(InstanceIdentifier<Node> childId);
-
- void removeParent(InstanceIdentifier<Node> parentId);
-
- void updateConnectedNodeStatus(InstanceIdentifier<Node> nodeId);
-
- void updateDisconnectedNodeStatus(InstanceIdentifier<Node> nodeId);
-
- @NonNull
- Map<String, Boolean> getNodeConnectionStatuses();
-}
--- /dev/null
+/*
+ * Copyright (c) 2016 Ericsson India Global Services Pvt Ltd. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.genius.utils.hwvtep;
+
+import java.io.PrintStream;
+import java.util.Objects;
+
+/**
+ * Debug event tied to a specific hwvtep node, identified by its node-id
+ * string. Concrete subclasses represent connect/disconnect/HA-child events.
+ */
+public abstract class NodeEvent extends DebugEvent {
+
+    protected final String nodeId;
+
+    public NodeEvent(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+        // Compare runtime classes so that different event types for the same
+        // node (e.g. connected vs disconnected) are never considered equal,
+        // which an instanceof check would wrongly allow.
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+        return Objects.equals(nodeId, ((NodeEvent) other).nodeId);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hashCode(nodeId);
+    }
+
+    enum NodeStatus {
+        Connected,Disconnected
+    }
+
+    /** Emitted when a node connects. */
+    public static class NodeConnectedEvent extends NodeEvent {
+
+        public NodeConnectedEvent(String nodeId) {
+            super(nodeId);
+        }
+
+        public void print(PrintStream out) {
+            out.print(nodeId);
+            out.print(" connected");
+        }
+    }
+
+    /** Emitted when a node disconnects. */
+    public static class NodeDisconnectedEvent extends NodeEvent {
+
+        public NodeDisconnectedEvent(String nodeId) {
+            super(nodeId);
+        }
+
+        public void print(PrintStream out) {
+            out.print(nodeId);
+            out.print(" disconnected");
+        }
+    }
+
+    /** Emitted when a node is registered as an HA child. */
+    public static class ChildAddedEvent extends NodeEvent {
+
+        public ChildAddedEvent(String nodeId) {
+            super(nodeId);
+        }
+
+        public void print(PrintStream out) {
+            out.print(nodeId);
+            out.print(" became HA child");
+        }
+    }
+}
+++ /dev/null
-/*
- * Copyright (c) 2017 Inocybe Technologies and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.genius.utils.hwvtep.internal;
-
-import com.google.common.collect.ImmutableMap;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import javax.inject.Singleton;
-import org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache;
-import org.opendaylight.genius.utils.hwvtep.HwvtepSouthboundConstants;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-/**
- * Implementation of HwvtepNodeHACache.
- *
- * @author Thomas Pantelis
- */
-@Singleton
-public class HwvtepNodeHACacheImpl implements HwvtepNodeHACache {
- private final ConcurrentHashMap<InstanceIdentifier<Node>, Set<InstanceIdentifier<Node>>> parentToChildMap =
- new ConcurrentHashMap<>();
-
- private final ConcurrentHashMap<InstanceIdentifier<Node>, InstanceIdentifier<Node>> childToParentMap =
- new ConcurrentHashMap<>();
-
- private final Set<String> childNodeIds = ConcurrentHashMap.newKeySet();
-
- private final ConcurrentHashMap<String, Boolean> connectedNodes = new ConcurrentHashMap<>();
-
- @Override
- public void addChild(InstanceIdentifier<Node> parentId, InstanceIdentifier<Node> childId) {
- if (parentId == null || childId == null) {
- return;
- }
-
- parentToChildMap.computeIfAbsent(parentId, key -> ConcurrentHashMap.newKeySet()).add(childId);
- childToParentMap.put(childId, parentId);
- String childNodeId = childId.firstKeyOf(Node.class).getNodeId().getValue();
- childNodeIds.add(childNodeId);
- }
-
- @Override
- public boolean isHAEnabledDevice(InstanceIdentifier<?> iid) {
- if (iid == null) {
- return false;
- }
- boolean enabled = childToParentMap.containsKey(iid.firstIdentifierOf(Node.class));
- if (!enabled) {
- String psNodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
- int idx = psNodeId.indexOf(HwvtepSouthboundConstants.PSWITCH_URI_PREFIX);
- if (idx > 0) {
- String globalNodeId = psNodeId.substring(0, idx - 1);
- return childNodeIds.contains(globalNodeId);
- }
- }
- return enabled;
- }
-
- @Override
- public boolean isHAParentNode(InstanceIdentifier<Node> nodeId) {
- return parentToChildMap.containsKey(nodeId);
- }
-
- @Override
- public Set<InstanceIdentifier<Node>> getChildrenForHANode(InstanceIdentifier<Node> parentId) {
- Set<InstanceIdentifier<Node>> children = parentId != null ? parentToChildMap.get(parentId) : null;
- return children != null ? children : Collections.emptySet();
- }
-
- @Override
- public Set<InstanceIdentifier<Node>> getHAParentNodes() {
- return parentToChildMap.keySet();
- }
-
- @Override
- public Set<InstanceIdentifier<Node>> getHAChildNodes() {
- return childToParentMap.keySet();
- }
-
- @Override
- public InstanceIdentifier<Node> getParent(InstanceIdentifier<Node> childId) {
- return childId != null ? childToParentMap.get(childId) : null;
- }
-
- @Override
- public void removeParent(InstanceIdentifier<Node> parentId) {
- if (parentId == null) {
- return;
- }
-
- if (parentToChildMap.get(parentId) != null) {
- Set<InstanceIdentifier<Node>> childs = parentToChildMap.get(parentId);
- for (InstanceIdentifier<Node> child : childs) {
- childToParentMap.remove(child);
- String childNodeId = child.firstKeyOf(Node.class).getNodeId().getValue();
- childNodeIds.remove(childNodeId);
- }
- }
- parentToChildMap.remove(parentId);
- }
-
- @Override
- public void updateConnectedNodeStatus(InstanceIdentifier<Node> iid) {
- String nodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
- connectedNodes.put(nodeId, true);
- }
-
- @Override
- public void updateDisconnectedNodeStatus(InstanceIdentifier<Node> iid) {
- String nodeId = iid.firstKeyOf(Node.class).getNodeId().getValue();
- connectedNodes.put(nodeId, false);
- }
-
- @Override
- public Map<String, Boolean> getNodeConnectionStatuses() {
- return ImmutableMap.copyOf(connectedNodes);
- }
-}
<service ref="mDSALManager"
interface="org.opendaylight.genius.mdsalutil.interfaces.IMdsalApiManager" />
- <service ref="hwvtepNodeHACacheImpl"
- interface="org.opendaylight.genius.utils.hwvtep.HwvtepNodeHACache"/>
<service ref="shardStatusMonitorImpl"
interface="org.opendaylight.genius.mdsalutil.interfaces.ShardStatusMonitor" />
+++ /dev/null
-/*
- * Copyright (c) 2017 Inocybe Technologies and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.genius.utils.hwvtep.internal;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import org.junit.Test;
-import org.opendaylight.genius.utils.hwvtep.HwvtepSouthboundConstants;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeKey;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-public class HwvtepNodeHACacheImplTest {
- private final HwvtepNodeHACacheImpl hwvtepNodeHACacheImpl = new HwvtepNodeHACacheImpl();
-
- @Test
- public void testAddChild() {
- InstanceIdentifier<Node> parent = newNodeInstanceIdentifier("ha");
- InstanceIdentifier<Node> child1 = newNodeInstanceIdentifier("d1");
-
- hwvtepNodeHACacheImpl.addChild(parent, child1);
-
- assertTrue(hwvtepNodeHACacheImpl.isHAEnabledDevice(child1));
- assertTrue(hwvtepNodeHACacheImpl.isHAParentNode(parent));
-
- InstanceIdentifier<Node> child2 = newNodeInstanceIdentifier("d1");
- hwvtepNodeHACacheImpl.addChild(parent, child2);
- assertTrue(hwvtepNodeHACacheImpl.isHAEnabledDevice(child1));
- assertTrue(hwvtepNodeHACacheImpl.isHAEnabledDevice(child2));
- assertTrue(hwvtepNodeHACacheImpl.isHAParentNode(parent));
-
- assertEquals(ImmutableSet.of(child1, child2), hwvtepNodeHACacheImpl.getHAChildNodes());
- assertEquals(ImmutableSet.of(parent), hwvtepNodeHACacheImpl.getHAParentNodes());
-
- assertEquals(ImmutableSet.of(child1, child2), hwvtepNodeHACacheImpl.getChildrenForHANode(parent));
-
- hwvtepNodeHACacheImpl.removeParent(parent);
- assertFalse(hwvtepNodeHACacheImpl.isHAEnabledDevice(child1));
- assertFalse(hwvtepNodeHACacheImpl.isHAEnabledDevice(child2));
- assertFalse(hwvtepNodeHACacheImpl.isHAParentNode(parent));
- }
-
- @Test
- public void testNodeConnectionStatus() {
- InstanceIdentifier<Node> node1 = newNodeInstanceIdentifier("node1");
- InstanceIdentifier<Node> node2 = newNodeInstanceIdentifier("node2");
-
- hwvtepNodeHACacheImpl.updateConnectedNodeStatus(node1);
- assertEquals(ImmutableMap.of("node1", Boolean.TRUE), hwvtepNodeHACacheImpl.getNodeConnectionStatuses());
-
- hwvtepNodeHACacheImpl.updateConnectedNodeStatus(node2);
- assertEquals(ImmutableMap.of("node1", Boolean.TRUE, "node2", Boolean.TRUE),
- hwvtepNodeHACacheImpl.getNodeConnectionStatuses());
-
- hwvtepNodeHACacheImpl.updateDisconnectedNodeStatus(node1);
- assertEquals(ImmutableMap.of("node1", Boolean.FALSE, "node2", Boolean.TRUE),
- hwvtepNodeHACacheImpl.getNodeConnectionStatuses());
- }
-
- private static InstanceIdentifier<Node> newNodeInstanceIdentifier(String id) {
- NodeId nodeId = new NodeId(id);
- NodeKey nodeKey = new NodeKey(nodeId);
- TopologyKey topoKey = new TopologyKey(HwvtepSouthboundConstants.HWVTEP_TOPOLOGY_ID);
- return InstanceIdentifier.builder(NetworkTopology.class)
- .child(Topology.class, topoKey)
- .child(Node.class, nodeKey)
- .build();
- }
-}