BUG-2049: DataStore failure in StatisticsManager 39/11539/2
author Michal Rehak <mirehak@cisco.com>
Wed, 24 Sep 2014 16:21:38 +0000 (18:21 +0200)
committer michal rehak <mirehak@cisco.com>
Wed, 24 Sep 2014 17:54:47 +0000 (17:54 +0000)
!WORKAROUND ONLY

- during node reconciliation statsManager could join together 2 similar
  sets of statistics notifications and hit duplicate values during
  processing - here we just skip those duplicates in order not to lose
  the stats working thread and to finish the actual multipart

Change-Id: I526ab5439055ac565bfaa08e84e5043e8764846a
Signed-off-by: Michal Rehak <mirehak@cisco.com>
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatListenCommitFlow.java

index c5aefcbf9673087a21de9f3f1677b4976d74b77d..e54fcc6fa2100cb21af3c41b0f78fc9bb51a872c 100644 (file)
@@ -82,7 +82,7 @@ import com.google.common.collect.HashBiMap;
 public class StatListenCommitFlow extends StatAbstractListenCommit<Flow, OpendaylightFlowStatisticsListener>
                                             implements OpendaylightFlowStatisticsListener {
 
-    private static final Logger LOG = LoggerFactory.getLogger(StatListenCommitFlow.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(StatListenCommitFlow.class);
 
     private static final String ALIEN_SYSTEM_FLOW_ID = "#UF$TABLE*";
 
@@ -302,7 +302,11 @@ public class StatListenCommitFlow extends StatAbstractListenCommit<Flow, Openday
                     final List<FlowHashIdMap>  flowHashMap = flowHashMapping.getFlowHashIdMap() != null
                             ? flowHashMapping.getFlowHashIdMap() : Collections.<FlowHashIdMap> emptyList();
                     for (final FlowHashIdMap flowHashId : flowHashMap) {
-                        flowIdByHash.put(flowHashId.getKey(), flowHashId.getFlowId());
+                        try {
+                            flowIdByHash.put(flowHashId.getKey(), flowHashId.getFlowId());
+                        } catch (Exception e) {
+                            LOG.warn("flow hashing hit a duplicate for {} -> {}", flowHashId.getKey(), flowHashId.getFlowId());
+                        }
                     }
                 }
             }