import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
-import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
   as it expects the logical switch to already be present in the
   operational DS (created by the device).
*/
- HAJobScheduler.getInstance().submitJob(new Callable<Void>() {
- @Override
- public Void call() throws InterruptedException, ExecutionException, ReadFailedException,
- TransactionCommitFailedException {
- hwvtepHACache.updateConnectedNodeStatus(childNodePath);
- LOG.info("HA child reconnected handleNodeReConnected {}",
- childNode.getNodeId().getValue());
- ReadWriteTransaction tx = db.newReadWriteTransaction();
- copyHAPSConfigToChildPS(haPSCfg.get(), childNodePath, tx);
- tx.submit().checkedGet();
- return null;
+ HAJobScheduler.getInstance().submitJob(new Runnable() {
+ public void run() {
+ try {
+ hwvtepHACache.updateConnectedNodeStatus(childNodePath);
+ LOG.info("HA child reconnected handleNodeReConnected {}",
+ childNode.getNodeId().getValue());
+ ReadWriteTransaction tx = db.newReadWriteTransaction();
+ copyHAPSConfigToChildPS(haPSCfg.get(), childNodePath, tx);
+ tx.submit().checkedGet();
+ } catch (InterruptedException | ExecutionException | ReadFailedException
+ | TransactionCommitFailedException e) {
+ LOG.error("Failed to process ", e);
+ }
}
});
import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public class HAJobScheduler {
+public class HAJobScheduler implements Thread.UncaughtExceptionHandler {
+ private static final Logger LOG = LoggerFactory.getLogger(HAOpClusteredListener.class);
ExecutorService executorService;
static HAJobScheduler instance = new HAJobScheduler();
private HAJobScheduler() {
ThreadFactory threadFact = new ThreadFactoryBuilder()
- .setNameFormat("hwvtep-ha-task-%d").build();
+ .setNameFormat("hwvtep-ha-task-%d").setUncaughtExceptionHandler(this).build();
executorService = Executors.newSingleThreadScheduledExecutor(threadFact);
}
executorService = service;
}
- public Future<Void> submitJob(Callable<Void> callable) {
- return executorService.submit(callable);
+ public void submitJob(Runnable runnable) {
+ executorService.execute(runnable);
}
- public void submitJob(Runnable runnable) {
- executorService.submit(runnable);
+ @Override
+ public void uncaughtException(Thread thread, Throwable throwable) {
+ LOG.error("Failed to execute task", throwable);
}
}
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.DataTreeChangeListener;
void handleNodeConnected(final InstanceIdentifier<Node> childPath,
final Node childNode,
final InstanceIdentifier<Node> haNodePath) {
- HAJobScheduler.getInstance().submitJob(new Callable<Void>() {
- @Override
- public Void call() throws InterruptedException, ExecutionException, ReadFailedException,
- TransactionCommitFailedException {
- LOG.info("Ha child connected handleNodeConnected {}", childNode.getNodeId().getValue());
- ReadWriteTransaction tx = getTx();
- haEventHandler.handleChildNodeConnected(childNode, childPath, haNodePath, tx);
- tx.submit().checkedGet();
- return null;
+ HAJobScheduler.getInstance().submitJob(new Runnable() {
+ public void run() {
+ try {
+ LOG.info("Ha child connected handleNodeConnected {}", childNode.getNodeId().getValue());
+ ReadWriteTransaction tx = getTx();
+ haEventHandler.handleChildNodeConnected(childNode, childPath, haNodePath, tx);
+ tx.submit().checkedGet();
+ } catch (InterruptedException | ExecutionException | ReadFailedException
+ | TransactionCommitFailedException e) {
+ LOG.error("Failed to process ", e);
+ }
}
});
}
final InstanceIdentifier<Node> haNodePath,
final Optional<Node> haGlobalCfg,
final Optional<Node> haPSCfg) {
- HAJobScheduler.getInstance().submitJob(new Callable<Void>() {
- @Override
- public Void call() throws InterruptedException, ExecutionException, ReadFailedException,
- TransactionCommitFailedException {
- LOG.info("Ha child reconnected handleNodeReConnected {}", childNode.getNodeId().getValue());
- ReadWriteTransaction tx = getTx();
- haEventHandler.handleChildNodeReConnected(childNode, childPath,
- haNodePath, haGlobalCfg, haPSCfg, tx);
- tx.submit().checkedGet();
- return null;
+ HAJobScheduler.getInstance().submitJob(new Runnable() {
+ public void run() {
+ try {
+ LOG.info("Ha child reconnected handleNodeReConnected {}", childNode.getNodeId().getValue());
+ ReadWriteTransaction tx = getTx();
+ haEventHandler.handleChildNodeReConnected(childNode, childPath,
+ haNodePath, haGlobalCfg, haPSCfg, tx);
+ tx.submit().checkedGet();
+ } catch (InterruptedException | ExecutionException | ReadFailedException
+ | TransactionCommitFailedException e) {
+ LOG.error("Failed to process ", e);
+ }
}
});
}