Merge "Use String(byte[], Charset)"
[openflowplugin.git] applications/bulk-o-matic/src/main/java/org/opendaylight/openflowplugin/applications/bulk/o/matic/FlowWriterDirectOFRpc.java
index 3a743fcfcc12681bfc5c2b06d59cb27d75b4af04..02bc9bd88cf7bae8e72577db2ccd1f325c1b8544 100644
@@ -7,17 +7,18 @@
  */
 package org.opendaylight.openflowplugin.applications.bulk.o.matic;
 
-import com.google.common.base.Optional;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
-import org.opendaylight.controller.md.sal.binding.api.DataBroker;
-import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
+import org.opendaylight.mdsal.binding.api.DataBroker;
+import org.opendaylight.mdsal.binding.api.ReadTransaction;
+import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
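The import swap above moves the class from Guava's Optional and the deprecated controller md.sal bindings to java.util.Optional and the mdsal binding API, whose read futures yield java.util.Optional. A minimal sketch of the equivalent JDK idioms, purely illustrative (the helper class and method names here are hypothetical, not part of this file):

    import java.util.Optional;

    public final class OptionalMigrationSketch {
        // Rough mapping of the Guava calls this code used to rely on:
        //   Optional.fromNullable(x)  ->  Optional.ofNullable(x)
        //   Optional.absent()         ->  Optional.empty()
        //   guavaOptional.or(fallback) -> jdkOptional.orElse(fallback)
        static String nodeIdOrDefault(String maybeNodeId) {
            return Optional.ofNullable(maybeNodeId).orElse("openflow:1");
        }
    }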
@@ -72,11 +73,9 @@ public class FlowWriterDirectOFRpc {
 
         Set<String> nodeIds = new HashSet<>();
         InstanceIdentifier<Nodes> nodes = InstanceIdentifier.create(Nodes.class);
-        ReadOnlyTransaction readOnlyTransaction = dataBroker.newReadOnlyTransaction();
 
-        try {
-            Optional<Nodes> nodesDataNode = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, nodes)
-                    .checkedGet();
+        try (ReadTransaction readOnlyTransaction = dataBroker.newReadOnlyTransaction()) {
+            Optional<Nodes> nodesDataNode = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, nodes).get();
             if (nodesDataNode.isPresent()) {
                 List<Node> nodesCollection = nodesDataNode.get().getNode();
                 if (nodesCollection != null && !nodesCollection.isEmpty()) {
@@ -90,8 +89,8 @@ public class FlowWriterDirectOFRpc {
             } else {
                 return Collections.emptySet();
             }
-        } catch (ReadFailedException rdFailedException) {
-            LOG.error("Failed to read connected nodes {}", rdFailedException);
+        } catch (InterruptedException | ExecutionException e) {
+            LOG.error("Failed to read connected nodes", e);
         }
         return nodeIds;
     }
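For context, the mdsal ReadTransaction is AutoCloseable, so try-with-resources replaces the manual transaction handling, and the plain get() on the returned future surfaces read failures as ExecutionException rather than the old ReadFailedException. A self-contained sketch of the same pattern, assuming the standard inventory Nodes model; the readNodes helper is hypothetical:

    import java.util.Optional;
    import java.util.concurrent.ExecutionException;
    import org.opendaylight.mdsal.binding.api.DataBroker;
    import org.opendaylight.mdsal.binding.api.ReadTransaction;
    import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    final class NodesReadSketch {
        // Hypothetical helper mirroring the read in getAllNodes() above.
        static Optional<Nodes> readNodes(DataBroker dataBroker)
                throws InterruptedException, ExecutionException {
            InstanceIdentifier<Nodes> path = InstanceIdentifier.create(Nodes.class);
            // newReadOnlyTransaction() returns an AutoCloseable ReadTransaction in the mdsal API.
            try (ReadTransaction tx = dataBroker.newReadOnlyTransaction()) {
                return tx.read(LogicalDatastoreType.OPERATIONAL, path).get();
            }
        }
    }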
@@ -133,8 +132,8 @@ public class FlowWriterDirectOFRpc {
 
                 AddFlowInput addFlowInput = builder.build();
 
-                LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId, addFlowInput.toString());
-                flowService.addFlow(addFlowInput);
+                LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId, addFlowInput);
+                LoggingFutures.addErrorLogging(flowService.addFlow(addFlowInput), LOG, "addFlow");
 
                 if (i % batchSize == 0) {
                     try {
@@ -143,7 +142,7 @@ public class FlowWriterDirectOFRpc {
 
                         TimeUnit.MILLISECONDS.sleep(PAUSE_BETWEEN_BATCH_MILLIS);
                     } catch (InterruptedException iEx) {
-                        LOG.error("Interrupted while pausing after batched push upto {}. Ex {}", i, iEx);
+                        LOG.error("Interrupted while pausing after batched push upto {} Ex ", i, iEx);
                     }
                 }
             }
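The addErrorLogging call above attaches a failure-logging callback to the RPC future instead of silently dropping it. A minimal usage sketch, assuming only infrautils and Guava futures; the class, method, and the "addFlow" message mirror the hunk above but are otherwise placeholders:

    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;
    import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    final class ErrorLoggingSketch {
        private static final Logger LOG = LoggerFactory.getLogger(ErrorLoggingSketch.class);

        // Illustrative only: addErrorLogging registers a callback that logs the supplied
        // message (with the cause) if the future fails, without blocking the caller.
        static void fireAndLog(ListenableFuture<?> rpcResultFuture) {
            LoggingFutures.addErrorLogging(rpcResultFuture, LOG, "addFlow");
        }

        public static void main(String[] args) {
            fireAndLog(Futures.immediateFailedFuture(new IllegalStateException("simulated RPC failure")));
        }
    }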