Update MRI projects for Aluminium
openflowplugin.git: applications/bulk-o-matic/src/main/java/org/opendaylight/openflowplugin/applications/bulk/o/matic/FlowWriterDirectOFRpc.java
index 59d8409e56de777777b31604f05a7997502d3241..7755fcf44cb1ab83536dccbffbb657b57bcfa7b0 100644
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016 Ericsson Systems, Inc. and others.  All rights reserved.
+ * Copyright (c) 2016, 2017 Ericsson Systems, Inc. and others.  All rights reserved.
  *
  * This program and the accompanying materials are made available under the
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
@@ -7,17 +7,18 @@
  */
 package org.opendaylight.openflowplugin.applications.bulk.o.matic;
 
-import com.google.common.base.Optional;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.List;
+import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
-import org.opendaylight.controller.md.sal.binding.api.DataBroker;
-import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
+import org.opendaylight.mdsal.binding.api.DataBroker;
+import org.opendaylight.mdsal.binding.api.ReadTransaction;
+import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
@@ -41,29 +42,26 @@ public class FlowWriterDirectOFRpc {
     private final ExecutorService flowPusher;
     private static final long PAUSE_BETWEEN_BATCH_MILLIS = 40;
 
-    public FlowWriterDirectOFRpc(final DataBroker dataBroker,
-                                 final SalFlowService salFlowService,
-                                 final ExecutorService flowPusher) {
+    public FlowWriterDirectOFRpc(final DataBroker dataBroker, final SalFlowService salFlowService,
+            final ExecutorService flowPusher) {
         this.dataBroker = dataBroker;
         this.flowService = salFlowService;
         this.flowPusher = flowPusher;
     }
 
-
-    public void rpcFlowAdd(String dpId, int flowsPerDpn, int batchSize){
+    public void rpcFlowAdd(String dpId, int flowsPerDpn, int batchSize) {
         if (!getAllNodes().isEmpty() && getAllNodes().contains(dpId)) {
             FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
             flowPusher.execute(addFlowRpcTask);
         }
     }
 
-    public void rpcFlowAddAll(int flowsPerDpn, int batchSize){
+    public void rpcFlowAddAll(int flowsPerDpn, int batchSize) {
         Set<String> nodeIdSet = getAllNodes();
-        if (nodeIdSet.isEmpty()){
+        if (nodeIdSet.isEmpty()) {
             LOG.warn("No nodes seen on OPERATIONAL DS. Aborting !!!!");
-        }
-        else{
-            for (String dpId : nodeIdSet){
+        } else {
+            for (String dpId : nodeIdSet) {
                 LOG.info("Starting FlowRPCTaskHandler for switch id {}", dpId);
                 FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
                 flowPusher.execute(addFlowRpcTask);
@@ -71,32 +69,28 @@ public class FlowWriterDirectOFRpc {
         }
     }
 
-    private Set<String> getAllNodes(){
+    private Set<String> getAllNodes() {
 
         Set<String> nodeIds = new HashSet<>();
         InstanceIdentifier<Nodes> nodes = InstanceIdentifier.create(Nodes.class);
-        ReadOnlyTransaction rTx = dataBroker.newReadOnlyTransaction();
 
-        try {
-            Optional<Nodes> nodesDataNode = rTx.read(LogicalDatastoreType.OPERATIONAL, nodes).checkedGet();
-            if (nodesDataNode.isPresent()){
-                List<Node> nodesCollection = nodesDataNode.get().getNode();
-                if (nodesCollection != null && !nodesCollection.isEmpty()) {
+        try (ReadTransaction readOnlyTransaction = dataBroker.newReadOnlyTransaction()) {
+            Optional<Nodes> nodesDataNode = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, nodes).get();
+            if (nodesDataNode.isPresent()) {
+                Collection<Node> nodesCollection = nodesDataNode.get().nonnullNode().values();
+                if (!nodesCollection.isEmpty()) {
                     for (Node node : nodesCollection) {
                         LOG.info("Switch with ID {} discovered !!", node.getId().getValue());
                         nodeIds.add(node.getId().getValue());
                     }
-                }
-                else{
+                } else {
                     return Collections.emptySet();
                 }
-            }
-            else{
+            } else {
                 return Collections.emptySet();
             }
-        }
-        catch(ReadFailedException rdFailedException){
-            LOG.error("Failed to read connected nodes {}", rdFailedException);
+        } catch (InterruptedException | ExecutionException e) {
+            LOG.error("Failed to read connected nodes", e);
         }
         return nodeIds;
     }
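For context (not part of the patch): a minimal standalone sketch of the read pattern this hunk migrates to, assuming the same dataBroker and LOG fields as the class above; the method name readOperationalNodeIds is hypothetical. The mdsal ReadTransaction is used in try-with-resources, read() returns a future whose get() yields a java.util.Optional and throws ExecutionException on failure, and the Aluminium bindings expose the keyed node list as a map via nonnullNode().

    private Set<String> readOperationalNodeIds() {
        Set<String> nodeIds = new HashSet<>();
        InstanceIdentifier<Nodes> nodesPath = InstanceIdentifier.create(Nodes.class);
        // try-with-resources closes the mdsal read transaction automatically
        try (ReadTransaction tx = dataBroker.newReadOnlyTransaction()) {
            Optional<Nodes> nodesOpt = tx.read(LogicalDatastoreType.OPERATIONAL, nodesPath).get();
            if (nodesOpt.isPresent()) {
                // keyed lists are maps in the Aluminium bindings; nonnullNode() never returns null
                for (Node node : nodesOpt.get().nonnullNode().values()) {
                    nodeIds.add(node.getId().getValue());
                }
            }
        } catch (InterruptedException | ExecutionException e) {
            // read failures now surface as ExecutionException rather than ReadFailedException
            LOG.error("Failed to read connected nodes", e);
        }
        return nodeIds;
    }
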
@@ -106,9 +100,7 @@ public class FlowWriterDirectOFRpc {
         private final int flowsPerDpn;
         private final int batchSize;
 
-        public FlowRPCHandlerTask(final String dpId,
-                                  final int flowsPerDpn,
-                                  final int batchSize){
+        public FlowRPCHandlerTask(final String dpId, final int flowsPerDpn, final int batchSize) {
             this.dpId = dpId;
             this.flowsPerDpn = flowsPerDpn;
             this.batchSize = batchSize;
@@ -117,10 +109,10 @@ public class FlowWriterDirectOFRpc {
         @Override
         public void run() {
 
-            short tableId = (short)1;
+            short tableId = (short) 1;
             int initFlowId = 500;
 
-            for (int i=1; i<= flowsPerDpn; i++){
+            for (int i = 1; i <= flowsPerDpn; i++) {
 
                 String flowId = Integer.toString(initFlowId + i);
 
@@ -140,9 +132,8 @@ public class FlowWriterDirectOFRpc {
 
                 AddFlowInput addFlowInput = builder.build();
 
-                LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId,
-                        addFlowInput.toString());
-                flowService.addFlow(addFlowInput);
+                LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId, addFlowInput);
+                LoggingFutures.addErrorLogging(flowService.addFlow(addFlowInput), LOG, "addFlow");
 
                 if (i % batchSize == 0) {
                     try {
@@ -151,7 +142,7 @@ public class FlowWriterDirectOFRpc {
 
                         TimeUnit.MILLISECONDS.sleep(PAUSE_BETWEEN_BATCH_MILLIS);
                     } catch (InterruptedException iEx) {
-                        LOG.error("Interrupted while pausing after batched push upto {}. Ex {}", i, iEx);
+                        LOG.error("Interrupted while pausing after batched push upto {}", i, iEx);
                     }
                 }
             }
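
For context (not part of the patch): a minimal sketch of the addFlow error-logging change above, assuming the flowService and LOG fields of the enclosing class; the pushFlow helper name is hypothetical. infrautils' LoggingFutures.addErrorLogging attaches a callback to the RPC future so a failed addFlow call is logged instead of being dropped silently.

    // Hypothetical helper for illustration only.
    private void pushFlow(AddFlowInput addFlowInput) {
        // The future returned by addFlow() used to be ignored; addErrorLogging
        // registers a callback that logs any failure under the "addFlow" label.
        LoggingFutures.addErrorLogging(flowService.addFlow(addFlowInput), LOG, "addFlow");
    }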