/*
 * Copyright (c) 2016, 2017 Ericsson Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
*/
package org.opendaylight.openflowplugin.applications.bulk.o.matic;
-import com.google.common.base.Optional;
-import org.opendaylight.controller.md.sal.binding.api.DataBroker;
-import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import org.opendaylight.infrautils.utils.concurrent.LoggingFutures;
+import org.opendaylight.mdsal.binding.api.DataBroker;
+import org.opendaylight.mdsal.binding.api.ReadTransaction;
+import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
public class FlowWriterDirectOFRpc {
private static final Logger LOG = LoggerFactory.getLogger(FlowWriterDirectOFRpc.class);
private final ExecutorService flowPusher;
private static final long PAUSE_BETWEEN_BATCH_MILLIS = 40;
- public FlowWriterDirectOFRpc(final DataBroker dataBroker,
- final SalFlowService salFlowService,
- final ExecutorService flowPusher) {
+ public FlowWriterDirectOFRpc(final DataBroker dataBroker, final SalFlowService salFlowService,
+ final ExecutorService flowPusher) {
this.dataBroker = dataBroker;
this.flowService = salFlowService;
this.flowPusher = flowPusher;
}
-
- public void rpcFlowAdd(String dpId, int flowsPerDpn, int batchSize){
+ public void rpcFlowAdd(String dpId, int flowsPerDpn, int batchSize) {
if (!getAllNodes().isEmpty() && getAllNodes().contains(dpId)) {
FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
flowPusher.execute(addFlowRpcTask);
}
}
- public void rpcFlowAddAll(int flowsPerDpn, int batchSize){
+ public void rpcFlowAddAll(int flowsPerDpn, int batchSize) {
Set<String> nodeIdSet = getAllNodes();
- if (nodeIdSet.isEmpty()){
+ if (nodeIdSet.isEmpty()) {
LOG.warn("No nodes seen on OPERATIONAL DS. Aborting !!!!");
- }
- else{
- for (String dpId : nodeIdSet){
+ } else {
+ for (String dpId : nodeIdSet) {
LOG.info("Starting FlowRPCTaskHandler for switch id {}", dpId);
FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
flowPusher.execute(addFlowRpcTask);
}
}
- private Set<String> getAllNodes(){
+ private Set<String> getAllNodes() {
Set<String> nodeIds = new HashSet<>();
InstanceIdentifier<Nodes> nodes = InstanceIdentifier.create(Nodes.class);
- ReadOnlyTransaction rTx = dataBroker.newReadOnlyTransaction();
- try {
- Optional<Nodes> nodesDataNode = rTx.read(LogicalDatastoreType.OPERATIONAL, nodes).checkedGet();
- if (nodesDataNode.isPresent()){
+ try (ReadTransaction readOnlyTransaction = dataBroker.newReadOnlyTransaction()) {
+ Optional<Nodes> nodesDataNode = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, nodes).get();
+ if (nodesDataNode.isPresent()) {
List<Node> nodesCollection = nodesDataNode.get().getNode();
if (nodesCollection != null && !nodesCollection.isEmpty()) {
for (Node node : nodesCollection) {
LOG.info("Switch with ID {} discovered !!", node.getId().getValue());
nodeIds.add(node.getId().getValue());
}
- }
- else{
+ } else {
return Collections.emptySet();
}
- }
- else{
+ } else {
return Collections.emptySet();
}
- }
- catch(ReadFailedException rdFailedException){
- LOG.error("Failed to read connected nodes {}", rdFailedException);
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.error("Failed to read connected nodes", e);
}
return nodeIds;
}
private final int flowsPerDpn;
private final int batchSize;
- public FlowRPCHandlerTask(final String dpId,
- final int flowsPerDpn,
- final int batchSize){
+ public FlowRPCHandlerTask(final String dpId, final int flowsPerDpn, final int batchSize) {
this.dpId = dpId;
this.flowsPerDpn = flowsPerDpn;
this.batchSize = batchSize;
@Override
public void run() {
- short tableId = (short)1;
+ short tableId = (short) 1;
int initFlowId = 500;
- for (int i=1; i<= flowsPerDpn; i++){
+ for (int i = 1; i <= flowsPerDpn; i++) {
String flowId = Integer.toString(initFlowId + i);
AddFlowInput addFlowInput = builder.build();
- LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId,
- addFlowInput.toString());
- flowService.addFlow(addFlowInput);
+ LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId, addFlowInput);
+ LoggingFutures.addErrorLogging(flowService.addFlow(addFlowInput), LOG, "addFlow");
if (i % batchSize == 0) {
try {
TimeUnit.MILLISECONDS.sleep(PAUSE_BETWEEN_BATCH_MILLIS);
} catch (InterruptedException iEx) {
- LOG.error("Interrupted while pausing after batched push upto {}. Ex {}", i, iEx);
+ LOG.error("Interrupted while pausing after batched push upto {} Ex ", i, iEx);
}
}
}