MRI version bump-up for Aluminium
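
Adjust NodeCopier to the APIs that come with the Aluminium MRI bump:
use java.util.Optional (orElse(null) instead of orNull()), take
DataBroker from org.opendaylight.mdsal.binding.api, replace
ListenableFutures.addErrorLogging with infrautils LoggingFutures, pass
an explicit MoreExecutors.directExecutor() to Futures.addCallback,
iterate getManagers().values() now that keyed lists bind as maps, and
replace put/merge with CREATE_MISSING_PARENTS by
mergeParentStructurePut/mergeParentStructureMerge. The global
augmentation/node merge is also split so that the config path uses
mergeConfigData while the operational path keeps mergeOperationalData.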
diff --git a/elanmanager/impl/src/main/java/org/opendaylight/netvirt/elan/l2gw/ha/handlers/NodeCopier.java b/elanmanager/impl/src/main/java/org/opendaylight/netvirt/elan/l2gw/ha/handlers/NodeCopier.java
index 749583e52c25e94c84b979b3472d7ba7b9095134..a731aa7119a59885e5143c614b64f7abcf629703 100644
--- a/elanmanager/impl/src/main/java/org/opendaylight/netvirt/elan/l2gw/ha/handlers/NodeCopier.java
+++ b/elanmanager/impl/src/main/java/org/opendaylight/netvirt/elan/l2gw/ha/handlers/NodeCopier.java
@@ -7,24 +7,23 @@
  */
 package org.opendaylight.netvirt.elan.l2gw.ha.handlers;
 
-import static org.opendaylight.controller.md.sal.binding.api.WriteTransaction.CREATE_MISSING_PARENTS;
 import static org.opendaylight.genius.infra.Datastore.CONFIGURATION;
 
-import com.google.common.base.Optional;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.MoreExecutors;
+import java.util.Optional;
 import java.util.concurrent.ExecutionException;
 import javax.inject.Inject;
 import javax.inject.Singleton;
-import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.genius.infra.Datastore;
 import org.opendaylight.genius.infra.Datastore.Configuration;
 import org.opendaylight.genius.infra.Datastore.Operational;
 import org.opendaylight.genius.infra.ManagedNewTransactionRunner;
 import org.opendaylight.genius.infra.ManagedNewTransactionRunnerImpl;
 import org.opendaylight.genius.infra.TypedReadWriteTransaction;
-import org.opendaylight.infrautils.utils.concurrent.ListenableFutures;
+import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
+import org.opendaylight.mdsal.binding.api.DataBroker;
 import org.opendaylight.netvirt.elan.l2gw.ha.HwvtepHAUtil;
 import org.opendaylight.netvirt.elan.l2gw.ha.listeners.HAJobScheduler;
 import org.opendaylight.netvirt.elan.l2gw.ha.merge.GlobalAugmentationMerger;
@@ -69,7 +68,7 @@ public class NodeCopier {
             Futures.addCallback(tx.read(srcPath), new FutureCallback<Optional<Node>>() {
                 @Override
                 public void onSuccess(Optional<Node> nodeOptional) {
-                    HAJobScheduler.getInstance().submitJob(() -> ListenableFutures.addErrorLogging(
+                    HAJobScheduler.getInstance().submitJob(() -> LoggingFutures.addErrorLogging(
                         txRunner.callWithNewReadWriteTransactionAndSubmit(datastoreType, tx -> {
                             if (nodeOptional.isPresent()) {
                                 copyGlobalNode(nodeOptional, srcPath, dstPath, datastoreType, tx);
@@ -113,31 +112,35 @@ public class NodeCopier {
                 existingDstGlobalNodeOptional.isPresent() ? existingDstGlobalNodeOptional.get() : null;
         HwvtepGlobalAugmentation existingHAGlobalData = HwvtepHAUtil.getGlobalAugmentationOfNode(existingDstGlobalNode);
 
-        globalAugmentationMerger.mergeOperationalData(haBuilder, existingHAGlobalData, srcGlobalAugmentation, dstPath);
-        globalNodeMerger.mergeOperationalData(haNodeBuilder,
-                existingDstGlobalNode, srcGlobalNodeOptional.get(), dstPath);
-
 
         if (Operational.class.equals(datastoreType)) {
+            globalAugmentationMerger.mergeOperationalData(haBuilder, existingHAGlobalData, srcGlobalAugmentation,
+                    dstPath);
+            globalNodeMerger.mergeOperationalData(haNodeBuilder,
+                    existingDstGlobalNode, srcGlobalNodeOptional.get(), dstPath);
             haBuilder.setManagers(HwvtepHAUtil.buildManagersForHANode(srcGlobalNodeOptional.get(),
                     existingDstGlobalNodeOptional));
             //Also update the manager section in config which helps in cluster reboot scenarios
-            ListenableFutures.addErrorLogging(
+            LoggingFutures.addErrorLogging(
                 txRunner.callWithNewWriteOnlyTransactionAndSubmit(CONFIGURATION,
-                    confTx -> haBuilder.getManagers().forEach(manager -> {
+                    confTx -> haBuilder.getManagers().values().forEach(manager -> {
                         InstanceIdentifier<Managers> managerIid =
                             dstPath.augmentation(HwvtepGlobalAugmentation.class).child(Managers.class, manager.key());
-                        confTx.put(managerIid, manager, CREATE_MISSING_PARENTS);
+                        confTx.mergeParentStructurePut(managerIid, manager);
                     })), LOG, "Error updating the manager section in config");
 
+        } else {
+            globalAugmentationMerger.mergeConfigData(haBuilder, srcGlobalAugmentation, dstPath);
+            globalNodeMerger.mergeConfigData(haNodeBuilder, srcGlobalNodeOptional.get(), dstPath);
         }
+
         haBuilder.setDbVersion(srcGlobalAugmentation.getDbVersion());
         haNodeBuilder.addAugmentation(HwvtepGlobalAugmentation.class, haBuilder.build());
         Node haNode = haNodeBuilder.build();
         if (Operational.class.equals(datastoreType)) {
-            tx.merge(dstPath, haNode, CREATE_MISSING_PARENTS);
+            tx.mergeParentStructureMerge(dstPath, haNode);
         } else {
-            tx.put(dstPath, haNode, CREATE_MISSING_PARENTS);
+            tx.mergeParentStructurePut(dstPath, haNode);
         }
     }
 
@@ -153,7 +156,7 @@ public class NodeCopier {
                 @Override
                 public void onSuccess(Optional<Node> nodeOptional) {
                     HAJobScheduler.getInstance().submitJob(() -> {
-                        ListenableFutures.addErrorLogging(
+                        LoggingFutures.addErrorLogging(
                             txRunner.callWithNewReadWriteTransactionAndSubmit(datastoreType, tx -> {
                                 if (nodeOptional.isPresent()) {
                                     copyPSNode(nodeOptional,
@@ -171,7 +174,7 @@ public class NodeCopier {
                 @Override
                 public void onFailure(Throwable throwable) {
                 }
-            });
+            }, MoreExecutors.directExecutor());
             return;
         }
         NodeBuilder dstPsNodeBuilder = HwvtepHAUtil.getNodeBuilderForPath(dstPsPath);
@@ -180,7 +183,7 @@ public class NodeCopier {
         PhysicalSwitchAugmentation srcPsAugmenatation =
                 srcPsNodeOptional.get().augmentation(PhysicalSwitchAugmentation.class);
 
-        Node existingDstPsNode = tx.read(dstPsPath).get().orNull();
+        Node existingDstPsNode = tx.read(dstPsPath).get().orElse(null);
         PhysicalSwitchAugmentation existingDstPsAugmentation =
                 HwvtepHAUtil.getPhysicalSwitchAugmentationOfNode(existingDstPsNode);
         if (Operational.class.equals(datastoreType)) {
@@ -195,7 +198,7 @@ public class NodeCopier {
 
         dstPsNodeBuilder.addAugmentation(PhysicalSwitchAugmentation.class, dstPsAugmentationBuilder.build());
         Node dstPsNode = dstPsNodeBuilder.build();
-        tx.merge(dstPsPath, dstPsNode, CREATE_MISSING_PARENTS);
+        tx.mergeParentStructureMerge(dstPsPath, dstPsNode);
         LOG.debug("Copied {} physical switch node from {} to {}", datastoreType, srcPsPath, dstPsPath);
     }
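
Note (not part of the patch): a minimal sketch of the migrated write pattern used above, assuming the genius ManagedNewTransactionRunner and infrautils LoggingFutures APIs shown in this change; the class, method, and variable names below are illustrative placeholders only.

// Illustrative sketch only -- TxMigrationSketch, writeNode(), path and node are
// placeholder names. The calls mirror the migrated APIs used in this patch.
import static org.opendaylight.genius.infra.Datastore.CONFIGURATION;

import org.opendaylight.genius.infra.ManagedNewTransactionRunner;
import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class TxMigrationSketch {
    private static final Logger LOG = LoggerFactory.getLogger(TxMigrationSketch.class);

    private final ManagedNewTransactionRunner txRunner;

    TxMigrationSketch(ManagedNewTransactionRunner txRunner) {
        this.txRunner = txRunner;
    }

    void writeNode(InstanceIdentifier<Node> path, Node node) {
        // Old pattern: ListenableFutures.addErrorLogging(...) around a transaction that
        // called tx.put(path, node, CREATE_MISSING_PARENTS).
        // New pattern: LoggingFutures plus mergeParentStructurePut, which creates the
        // missing parent structure without the removed CREATE_MISSING_PARENTS flag.
        LoggingFutures.addErrorLogging(
            txRunner.callWithNewWriteOnlyTransactionAndSubmit(CONFIGURATION,
                tx -> tx.mergeParentStructurePut(path, node)),
            LOG, "Error writing node");
    }
}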