Switch to MD-SAL APIs
applications/bulk-o-matic/src/main/java/org/opendaylight/openflowplugin/applications/bulk/o/matic/FlowWriterDirectOFRpc.java
/*
 * Copyright (c) 2016, 2017 Ericsson Systems, Inc. and others.  All rights reserved.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
 * and is available at http://www.eclipse.org/legal/epl-v10.html
 */
package org.opendaylight.openflowplugin.applications.bulk.o.matic;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.opendaylight.infrautils.utils.concurrent.JdkFutures;
import org.opendaylight.mdsal.binding.api.DataBroker;
import org.opendaylight.mdsal.binding.api.ReadTransaction;
import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.FlowTableRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.Match;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

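/**
 * Pushes flows to switches by invoking the {@code SalFlowService} add-flow RPC directly rather
 * than writing to the config datastore. Nodes are discovered from the OPERATIONAL datastore and
 * flow pushes are dispatched to the supplied {@link ExecutorService} in batches.
 *
 * <p>A minimal usage sketch; the {@code dataBroker}, {@code salFlowService} and executor are
 * assumed to be supplied by the application's wiring (e.g. dependency injection):
 * <pre>{@code
 *   FlowWriterDirectOFRpc writer =
 *       new FlowWriterDirectOFRpc(dataBroker, salFlowService, Executors.newFixedThreadPool(4));
 *   writer.rpcFlowAddAll(1000, 100);   // 1000 flows per switch, pausing after every 100 RPCs
 * }</pre>
 */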
public class FlowWriterDirectOFRpc {

    private static final Logger LOG = LoggerFactory.getLogger(FlowWriterDirectOFRpc.class);
    private static final long PAUSE_BETWEEN_BATCH_MILLIS = 40;

    private final DataBroker dataBroker;
    private final SalFlowService flowService;
    private final ExecutorService flowPusher;

    public FlowWriterDirectOFRpc(final DataBroker dataBroker, final SalFlowService salFlowService,
            final ExecutorService flowPusher) {
        this.dataBroker = dataBroker;
        this.flowService = salFlowService;
        this.flowPusher = flowPusher;
    }

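    /**
     * Queues a flow-programming task for a single switch, identified by its node id,
     * provided that switch is present in the OPERATIONAL datastore.
     */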
    public void rpcFlowAdd(String dpId, int flowsPerDpn, int batchSize) {
        if (getAllNodes().contains(dpId)) {
            FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
            flowPusher.execute(addFlowRpcTask);
        }
    }

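    /**
     * Queues a flow-programming task for every switch currently present in the
     * OPERATIONAL datastore.
     */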
    public void rpcFlowAddAll(int flowsPerDpn, int batchSize) {
        Set<String> nodeIdSet = getAllNodes();
        if (nodeIdSet.isEmpty()) {
            LOG.warn("No nodes found in the OPERATIONAL datastore, aborting");
        } else {
            for (String dpId : nodeIdSet) {
                LOG.info("Starting FlowRPCHandlerTask for switch id {}", dpId);
                FlowRPCHandlerTask addFlowRpcTask = new FlowRPCHandlerTask(dpId, flowsPerDpn, batchSize);
                flowPusher.execute(addFlowRpcTask);
            }
        }
    }

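    /**
     * Reads the inventory from the OPERATIONAL datastore and returns the ids of all connected
     * nodes, or an empty set if none are found or the read fails.
     */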
    private Set<String> getAllNodes() {
        Set<String> nodeIds = new HashSet<>();
        InstanceIdentifier<Nodes> nodes = InstanceIdentifier.create(Nodes.class);

        try (ReadTransaction readOnlyTransaction = dataBroker.newReadOnlyTransaction()) {
            Optional<Nodes> nodesDataNode = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, nodes).get();
            if (nodesDataNode.isPresent()) {
                List<Node> nodesCollection = nodesDataNode.get().getNode();
                if (nodesCollection != null && !nodesCollection.isEmpty()) {
                    for (Node node : nodesCollection) {
                        LOG.info("Switch with ID {} discovered", node.getId().getValue());
                        nodeIds.add(node.getId().getValue());
                    }
                } else {
                    return Collections.emptySet();
                }
            } else {
                return Collections.emptySet();
            }
        } catch (InterruptedException | ExecutionException e) {
            LOG.error("Failed to read connected nodes", e);
        }
        return nodeIds;
    }

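    /**
     * Runnable that pushes {@code flowsPerDpn} flows to a single switch via the add-flow RPC,
     * sleeping for {@code PAUSE_BETWEEN_BATCH_MILLIS} milliseconds after every {@code batchSize}
     * invocations.
     */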
    public class FlowRPCHandlerTask implements Runnable {
        private final String dpId;
        private final int flowsPerDpn;
        private final int batchSize;

        public FlowRPCHandlerTask(final String dpId, final int flowsPerDpn, final int batchSize) {
            this.dpId = dpId;
            this.flowsPerDpn = flowsPerDpn;
            this.batchSize = batchSize;
        }

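        // For each flow: build the match and flow body, wrap them in an AddFlowInput pointing at
        // this switch, fire the add-flow RPC asynchronously, and pause after every batch.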
        @Override
        public void run() {
            short tableId = (short) 1;
            int initFlowId = 500;

            for (int i = 1; i <= flowsPerDpn; i++) {
                String flowId = Integer.toString(initFlowId + i);

                LOG.debug("Framing AddFlowInput for flow-id {}", flowId);

                Match match = BulkOMaticUtils.getMatch(i);
                InstanceIdentifier<Node> nodeIId = BulkOMaticUtils.getFlowCapableNodeId(dpId);
                InstanceIdentifier<Table> tableIId = BulkOMaticUtils.getTableId(tableId, dpId);
                InstanceIdentifier<Flow> flowIId = BulkOMaticUtils.getFlowId(tableIId, flowId);

                Flow flow = BulkOMaticUtils.buildFlow(tableId, flowId, match);

                AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
                builder.setNode(new NodeRef(nodeIId));
                builder.setFlowTable(new FlowTableRef(tableIId));
                builder.setFlowRef(new FlowRef(flowIId));

                AddFlowInput addFlowInput = builder.build();

                LOG.debug("RPC invocation for adding flow-id {} with input {}", flowId, addFlowInput);
                final Future<RpcResult<AddFlowOutput>> resultFuture = flowService.addFlow(addFlowInput);
                JdkFutures.addErrorLogging(resultFuture, LOG, "addFlow");

                if (i % batchSize == 0) {
                    try {
                        LOG.info("Pausing for {} milliseconds after a batch of {} RPC invocations",
                                PAUSE_BETWEEN_BATCH_MILLIS, batchSize);
                        TimeUnit.MILLISECONDS.sleep(PAUSE_BETWEEN_BATCH_MILLIS);
                    } catch (InterruptedException iEx) {
                        LOG.error("Interrupted while pausing after batched push up to {}", i, iEx);
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
    }
}