package org.opendaylight.controller.cluster.raft.utils;
import akka.dispatch.Futures;
-import akka.japi.Procedure;
-import akka.persistence.PersistentConfirmation;
-import akka.persistence.PersistentId;
+import akka.persistence.AtomicWrite;
import akka.persistence.PersistentImpl;
import akka.persistence.PersistentRepr;
import akka.persistence.journal.japi.AsyncWriteJournal;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.Uninterruptibles;
+import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import org.apache.commons.lang.SerializationUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.concurrent.Future;
*/
public class InMemoryJournal extends AsyncWriteJournal {
+ // Test hook: pairs a completion latch with an optional payload-type filter.
+ // The latch is counted down once per persisted message; when ofType is
+ // non-null, only messages whose payload class equals ofType count down
+ // (filtering is done by the journal's write path, not here).
+ private static class WriteMessagesComplete {
+ final CountDownLatch latch;
+ final Class<?> ofType;
+
+ public WriteMessagesComplete(int count, Class<?> ofType) {
+ this.latch = new CountDownLatch(count);
+ // null ofType means "count every message regardless of payload class"
+ this.ofType = ofType;
+ }
+ }
+
static final Logger LOG = LoggerFactory.getLogger(InMemoryJournal.class);
private static final Map<String, Map<Long, Object>> journals = new ConcurrentHashMap<>();
private static final Map<String, CountDownLatch> deleteMessagesCompleteLatches = new ConcurrentHashMap<>();
- private static final Map<String, CountDownLatch> writeMessagesCompleteLatches = new ConcurrentHashMap<>();
+ private static final Map<String, WriteMessagesComplete> writeMessagesComplete = new ConcurrentHashMap<>();
private static final Map<String, CountDownLatch> blockReadMessagesLatches = new ConcurrentHashMap<>();
+ // Entries are stored in serialized byte[] form when the payload was
+ // Serializable (so each read gets an independent copy); transparently
+ // deserialize on the way out, and pass non-byte[] data through unchanged.
+ private static Object deserialize(Object data) {
+ return data instanceof byte[] ? SerializationUtils.deserialize((byte[])data) : data;
+ }
+
public static void addEntry(String persistenceId, long sequenceNr, Object data) {
Map<Long, Object> journal = journals.get(persistenceId);
if(journal == null) {
}
synchronized (journal) {
- journal.put(sequenceNr, data);
+ journal.put(sequenceNr, data instanceof Serializable ?
+ SerializationUtils.serialize((Serializable) data) : data);
}
}
synchronized (journalMap) {
List<T> journal = new ArrayList<>(journalMap.size());
for(Object entry: journalMap.values()) {
- if(type.isInstance(entry)) {
- journal.add((T) entry);
+ Object data = deserialize(entry);
+ if(type.isInstance(data)) {
+ journal.add((T) data);
}
}
}
+ // Blocks up to 5 seconds for the latch registered via
+ // addWriteMessagesCompleteLatch(persistenceId, ...) to reach zero; fails the
+ // test with AssertionError on timeout.
+ // NOTE(review): throws NPE if no latch was registered for persistenceId --
+ // assumes the test always registers one first; confirm callers do so.
 public static void waitForWriteMessagesComplete(String persistenceId) {
- if(!Uninterruptibles.awaitUninterruptibly(writeMessagesCompleteLatches.get(persistenceId), 5, TimeUnit.SECONDS)) {
+ if(!Uninterruptibles.awaitUninterruptibly(writeMessagesComplete.get(persistenceId).latch, 5, TimeUnit.SECONDS)) {
 throw new AssertionError("Journal write messages did not complete");
 }
 }
}
+ // Registers a latch that completes after {@code count} messages have been
+ // persisted for {@code persistenceId}, counting every payload type.
+ // Replaces any previously registered latch for the same id.
 public static void addWriteMessagesCompleteLatch(String persistenceId, int count) {
- writeMessagesCompleteLatches.put(persistenceId, new CountDownLatch(count));
+ writeMessagesComplete.put(persistenceId, new WriteMessagesComplete(count, null));
+ }
+
+ // Overload that counts only messages whose payload class equals
+ // {@code ofType} (exact class match, not instanceof -- see the write path).
+ public static void addWriteMessagesCompleteLatch(String persistenceId, int count, Class<?> ofType) {
+ writeMessagesComplete.put(persistenceId, new WriteMessagesComplete(count, ofType));
 }
public static void addBlockReadMessagesLatch(String persistenceId, CountDownLatch latch) {
}
@Override
- public Future<Void> doAsyncReplayMessages(final String persistenceId, long fromSequenceNr,
- long toSequenceNr, long max, final Procedure<PersistentRepr> replayCallback) {
+ public Future<Void> doAsyncReplayMessages(final String persistenceId, final long fromSequenceNr,
+ final long toSequenceNr, final long max, final Consumer<PersistentRepr> replayCallback) {
+ LOG.trace("doAsyncReplayMessages for {}: fromSequenceNr: {}, toSequenceNr: {}", persistenceId,
+ fromSequenceNr,toSequenceNr);
return Futures.future(new Callable<Void>() {
@Override
public Void call() throws Exception {
}
Map<Long, Object> journal = journals.get(persistenceId);
- if(journal == null) {
+ if (journal == null) {
return null;
}
synchronized (journal) {
+ int count = 0;
for (Map.Entry<Long,Object> entry : journal.entrySet()) {
- PersistentRepr persistentMessage =
- new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId,
- false, null, null);
- replayCallback.apply(persistentMessage);
+ if (++count <= max && entry.getKey() >= fromSequenceNr && entry.getKey() <= toSequenceNr) {
+ PersistentRepr persistentMessage =
+ new PersistentImpl(deserialize(entry.getValue()), entry.getKey(), persistenceId,
+ null, false, null, null);
+ replayCallback.accept(persistentMessage);
+ }
}
}
@Override
public Future<Long> doAsyncReadHighestSequenceNr(String persistenceId, long fromSequenceNr) {
- // Akka calls this during recovery.
+ LOG.trace("doAsyncReadHighestSequenceNr for {}: fromSequenceNr: {}", persistenceId, fromSequenceNr);
+ // Akka calls this during recovery.
Map<Long, Object> journal = journals.get(persistenceId);
if(journal == null) {
- return Futures.successful(-1L);
+ return Futures.successful(fromSequenceNr);
}
synchronized (journal) {
}
@Override
- public Future<Void> doAsyncWriteMessages(final Iterable<PersistentRepr> messages) {
- return Futures.future(new Callable<Void>() {
+ public Future<Iterable<Optional<Exception>>> doAsyncWriteMessages(final Iterable<AtomicWrite> messages) {
+ return Futures.future(new Callable<Iterable<Optional<Exception>>>() {
@Override
- public Void call() throws Exception {
- for (PersistentRepr repr : messages) {
- Map<Long, Object> journal = journals.get(repr.persistenceId());
- if(journal == null) {
- journal = Maps.newLinkedHashMap();
- journals.put(repr.persistenceId(), journal);
- }
-
- synchronized (journal) {
+ public Iterable<Optional<Exception>> call() throws Exception {
+ for (AtomicWrite write : messages) {
+ // Copy to array - workaround for eclipse "ambiguous method" errors for toIterator, toIterable etc
+ PersistentRepr[] array = new PersistentRepr[write.payload().size()];
+ write.payload().copyToArray(array);
+ for(PersistentRepr repr: array) {
LOG.trace("doAsyncWriteMessages: id: {}: seqNr: {}, payload: {}", repr.persistenceId(),
- repr.sequenceNr(), repr.payload());
- journal.put(repr.sequenceNr(), repr.payload());
- }
+ repr.sequenceNr(), repr.payload());
+
+ addEntry(repr.persistenceId(), repr.sequenceNr(), repr.payload());
- CountDownLatch latch = writeMessagesCompleteLatches.get(repr.persistenceId());
- if(latch != null) {
- latch.countDown();
+ WriteMessagesComplete complete = writeMessagesComplete.get(repr.persistenceId());
+ if(complete != null) {
+ if(complete.ofType == null || complete.ofType.equals(repr.payload().getClass())) {
+ complete.latch.countDown();
+ }
+ }
}
}
- return null;
+ return Collections.emptyList();
}
}, context().dispatcher());
}
@Override
- public Future<Void> doAsyncWriteConfirmations(Iterable<PersistentConfirmation> confirmations) {
- return Futures.successful(null);
- }
-
- @Override
- public Future<Void> doAsyncDeleteMessages(Iterable<PersistentId> messageIds, boolean permanent) {
- return Futures.successful(null);
- }
-
- @Override
- public Future<Void> doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent) {
+ public Future<Void> doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr) {
+ LOG.trace("doAsyncDeleteMessagesTo: {}", toSequenceNr);
Map<Long, Object> journal = journals.get(persistenceId);
if(journal != null) {
synchronized (journal) {