import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
-import static org.opendaylight.controller.cluster.datastore.messages.CreateTransaction.CURRENT_VERSION;
+import static org.opendaylight.controller.cluster.datastore.DataStoreVersions.CURRENT_VERSION;
import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
import akka.actor.PoisonPill;
import akka.actor.Props;
import akka.dispatch.Dispatchers;
import akka.util.Timeout;
import com.google.common.base.Function;
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-import org.junit.After;
-import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
+import org.opendaylight.controller.cluster.DataPersistenceProvider;
+import org.opendaylight.controller.cluster.DelegatingPersistentDataProvider;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.BatchedModifications;
+import org.opendaylight.controller.cluster.datastore.messages.BatchedModificationsReply;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
+import org.opendaylight.controller.cluster.datastore.messages.ReadData;
+import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
+import org.opendaylight.controller.cluster.datastore.modification.DeleteModification;
import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.cluster.datastore.modification.ModificationPayload;
import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
-import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InMemoryJournal;
-import org.opendaylight.controller.cluster.datastore.utils.InMemorySnapshotStore;
+import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
import org.opendaylight.controller.cluster.datastore.utils.MockDataChangeListener;
+import org.opendaylight.controller.cluster.datastore.utils.SerializationUtils;
+import org.opendaylight.controller.cluster.notifications.RegisterRoleChangeListener;
+import org.opendaylight.controller.cluster.notifications.RegisterRoleChangeListenerReply;
+import org.opendaylight.controller.cluster.raft.RaftActorContext;
import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry;
import org.opendaylight.controller.cluster.raft.ReplicatedLogImplEntry;
import org.opendaylight.controller.cluster.raft.Snapshot;
-import org.opendaylight.controller.cluster.raft.base.messages.ApplyLogEntries;
+import org.opendaylight.controller.cluster.raft.base.messages.ApplyJournalEntries;
import org.opendaylight.controller.cluster.raft.base.messages.ApplySnapshot;
import org.opendaylight.controller.cluster.raft.base.messages.ApplyState;
-import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot;
import org.opendaylight.controller.cluster.raft.base.messages.ElectionTimeout;
+import org.opendaylight.controller.cluster.raft.base.messages.FollowerInitialSyncUpStatus;
import org.opendaylight.controller.cluster.raft.client.messages.FindLeader;
import org.opendaylight.controller.cluster.raft.client.messages.FindLeaderReply;
-import org.opendaylight.controller.cluster.raft.protobuff.client.messages.CompositeModificationByteStringPayload;
-import org.opendaylight.controller.cluster.raft.protobuff.client.messages.CompositeModificationPayload;
-import org.opendaylight.controller.cluster.raft.protobuff.client.messages.Payload;
+import org.opendaylight.controller.cluster.raft.utils.InMemoryJournal;
+import org.opendaylight.controller.cluster.raft.utils.InMemorySnapshotStore;
import org.opendaylight.controller.md.cluster.datastore.model.SchemaContextHelper;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.CreateTransactionReply;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-public class ShardTest extends AbstractActorTest {
-
- private static final SchemaContext SCHEMA_CONTEXT = TestModel.createTestContext();
-
- private static final AtomicInteger NEXT_SHARD_NUM = new AtomicInteger();
-
- private final ShardIdentifier shardID = ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config" + NEXT_SHARD_NUM.getAndIncrement()).build();
-
- private DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
- shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).
- shardHeartbeatIntervalInMillis(100).build();
-
- @Before
- public void setUp() {
- InMemorySnapshotStore.clear();
- InMemoryJournal.clear();
- }
-
- @After
- public void tearDown() {
- InMemorySnapshotStore.clear();
- InMemoryJournal.clear();
- }
-
- private Props newShardProps() {
- return Shard.props(shardID, Collections.<ShardIdentifier,String>emptyMap(),
- dataStoreContext, SCHEMA_CONTEXT);
- }
+public class ShardTest extends AbstractShardTest {
@Test
public void testRegisterChangeListener() throws Exception {
"testRegisterChangeListener-DataChangeListener");
shard.tell(new RegisterChangeListener(TestModel.TEST_PATH,
- dclActor.path(), AsyncDataBroker.DataChangeScope.BASE), getRef());
+ dclActor, AsyncDataBroker.DataChangeScope.BASE), getRef());
RegisterChangeListenerReply reply = expectMsgClass(duration("3 seconds"),
RegisterChangeListenerReply.class);
@Override
public Shard create() throws Exception {
- return new Shard(shardID, Collections.<ShardIdentifier,String>emptyMap(),
- dataStoreContext, SCHEMA_CONTEXT) {
+ // Use a non-persistent provider because this test actually invokes persist on the journal;
+ // that would cause all subsequent messages to not be queued properly.
+ // The basic issue is that you cannot use TestActorRef with a persistent actor (at least when
+ // it actually does a persist).
+ return new Shard(shardID, Collections.<String,String>emptyMap(),
+ dataStoreContextBuilder.persistent(false).build(), SCHEMA_CONTEXT) {
@Override
public void onReceiveCommand(final Object message) throws Exception {
if(message instanceof ElectionTimeout && firstElectionTimeout) {
onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
// Now send the RegisterChangeListener and wait for the reply.
- shard.tell(new RegisterChangeListener(path, dclActor.path(),
+ shard.tell(new RegisterChangeListener(path, dclActor,
AsyncDataBroker.DataChangeScope.SUBTREE), getRef());
RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
final CountDownLatch recoveryComplete = new CountDownLatch(1);
class TestShard extends Shard {
TestShard() {
- super(shardID, Collections.<ShardIdentifier, String>singletonMap(shardID, null),
- dataStoreContext, SCHEMA_CONTEXT);
+ super(shardID, Collections.<String, String>singletonMap(shardID.toString(), null),
+ newDatastoreContext(), SCHEMA_CONTEXT);
}
Map<String, String> getPeerAddresses() {
Uninterruptibles.awaitUninterruptibly(recoveryComplete, 5, TimeUnit.SECONDS));
String address = "akka://foobar";
- shard.underlyingActor().onReceiveCommand(new PeerAddressResolved(shardID, address));
+ shard.underlyingActor().onReceiveCommand(new PeerAddressResolved(shardID.toString(), address));
assertEquals("getPeerAddresses", address,
((TestShard)shard.underlyingActor()).getPeerAddresses().get(shardID.toString()));
TestActorRef<Shard> shard = TestActorRef.create(getSystem(), newShardProps(),
"testApplySnapshot");
- NormalizedNodeToNodeCodec codec =
- new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT);
+ InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor());
+ store.onGlobalContextUpdated(SCHEMA_CONTEXT);
- writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+ writeToStore(store, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
YangInstanceIdentifier root = YangInstanceIdentifier.builder().build();
- NormalizedNode<?,?> expected = readStore(shard, root);
-
- NormalizedNodeMessages.Container encode = codec.encode(expected);
+ NormalizedNode<?,?> expected = readStore(store, root);
ApplySnapshot applySnapshot = new ApplySnapshot(Snapshot.create(
- encode.getNormalizedNode().toByteString().toByteArray(),
+ SerializationUtils.serializeNormalizedNode(expected),
Collections.<ReplicatedLogEntry>emptyList(), 1, 2, 3, 4));
shard.underlyingActor().onReceiveCommand(applySnapshot);
NormalizedNode<?,?> actual = readStore(shard, root);
- assertEquals(expected, actual);
+ assertEquals("Root node", expected, actual);
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}
NormalizedNode<?, ?> node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- MutableCompositeModification compMod = new MutableCompositeModification();
- compMod.addModification(new WriteModification(TestModel.TEST_PATH, node, SCHEMA_CONTEXT));
- Payload payload = new CompositeModificationPayload(compMod.toSerializable());
- ApplyState applyState = new ApplyState(null, "test",
- new ReplicatedLogImplEntry(1, 2, payload));
+ ApplyState applyState = new ApplyState(null, "test", new ReplicatedLogImplEntry(1, 2,
+ newModificationPayload(new WriteModification(TestModel.TEST_PATH, node))));
shard.underlyingActor().onReceiveCommand(applyState);
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}
- @SuppressWarnings("serial")
@Test
public void testRecovery() throws Exception {
InMemoryDOMDataStore testStore = InMemoryDOMDataStoreFactory.create("Test", null, null);
testStore.onGlobalContextUpdated(SCHEMA_CONTEXT);
- DOMStoreWriteTransaction writeTx = testStore.newWriteOnlyTransaction();
- writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- DOMStoreThreePhaseCommitCohort commitCohort = writeTx.ready();
- commitCohort.preCommit().get();
- commitCohort.commit().get();
+ writeToStore(testStore, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- DOMStoreReadTransaction readTx = testStore.newReadOnlyTransaction();
- NormalizedNode<?, ?> root = readTx.read(YangInstanceIdentifier.builder().build()).get().get();
+ NormalizedNode<?, ?> root = readStore(testStore, YangInstanceIdentifier.builder().build());
InMemorySnapshotStore.addSnapshot(shardID.toString(), Snapshot.create(
- new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT).encode(
- root).
- getNormalizedNode().toByteString().toByteArray(),
- Collections.<ReplicatedLogEntry>emptyList(), 0, 1, -1, -1));
+ SerializationUtils.serializeNormalizedNode(root),
+ Collections.<ReplicatedLogEntry>emptyList(), 0, 1, -1, -1));
// Set up the InMemoryJournal.
- InMemoryJournal.addEntry(shardID.toString(), 0, new ReplicatedLogImplEntry(0, 1, newPayload(
+ InMemoryJournal.addEntry(shardID.toString(), 0, new ReplicatedLogImplEntry(0, 1, newModificationPayload(
new WriteModification(TestModel.OUTER_LIST_PATH,
- ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
- SCHEMA_CONTEXT))));
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build()))));
int nListEntries = 16;
Set<Integer> listEntryKeys = new HashSet<>();
- for(int i = 1; i <= nListEntries-5; i++) {
- listEntryKeys.add(Integer.valueOf(i));
- YangInstanceIdentifier path = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
- .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i).build();
- Modification mod = new MergeModification(path,
- ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i),
- SCHEMA_CONTEXT);
- InMemoryJournal.addEntry(shardID.toString(), i, new ReplicatedLogImplEntry(i, 1,
- newPayload(mod)));
- }
- // Add some of the new CompositeModificationByteStringPayload
- for(int i = 11; i <= nListEntries; i++) {
+ // Add some ModificationPayload entries
+ for(int i = 1; i <= nListEntries; i++) {
listEntryKeys.add(Integer.valueOf(i));
YangInstanceIdentifier path = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
.nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i).build();
Modification mod = new MergeModification(path,
- ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i),
- SCHEMA_CONTEXT);
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i));
InMemoryJournal.addEntry(shardID.toString(), i, new ReplicatedLogImplEntry(i, 1,
- newByteStringPayload(mod)));
+ newModificationPayload(mod)));
}
-
InMemoryJournal.addEntry(shardID.toString(), nListEntries + 1,
- new ApplyLogEntries(nListEntries));
-
- // Create the actor and wait for recovery complete.
+ new ApplyJournalEntries(nListEntries));
- final CountDownLatch recoveryComplete = new CountDownLatch(1);
-
- Creator<Shard> creator = new Creator<Shard>() {
- @Override
- public Shard create() throws Exception {
- return new Shard(shardID, Collections.<ShardIdentifier,String>emptyMap(),
- dataStoreContext, SCHEMA_CONTEXT) {
- @Override
- protected void onRecoveryComplete() {
- try {
- super.onRecoveryComplete();
- } finally {
- recoveryComplete.countDown();
- }
- }
- };
- }
- };
-
- TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
- Props.create(new DelegatingShardCreator(creator)), "testRecovery");
-
- assertEquals("Recovery complete", true, recoveryComplete.await(5, TimeUnit.SECONDS));
-
- // Verify data in the data store.
-
- NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
- assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
- assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
- outerList.getValue() instanceof Iterable);
- for(Object entry: (Iterable<?>) outerList.getValue()) {
- assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode",
- entry instanceof MapEntryNode);
- MapEntryNode mapEntry = (MapEntryNode)entry;
- Optional<DataContainerChild<? extends PathArgument, ?>> idLeaf =
- mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME));
- assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
- Object value = idLeaf.get().getValue();
- assertTrue("Unexpected value for leaf "+ TestModel.ID_QNAME.getLocalName() + ": " + value,
- listEntryKeys.remove(value));
- }
-
- if(!listEntryKeys.isEmpty()) {
- fail("Missing " + TestModel.OUTER_LIST_QNAME.getLocalName() + " entries with keys: " +
- listEntryKeys);
- }
-
- assertEquals("Last log index", nListEntries,
- shard.underlyingActor().getShardMBean().getLastLogIndex());
- assertEquals("Commit index", nListEntries,
- shard.underlyingActor().getShardMBean().getCommitIndex());
- assertEquals("Last applied", nListEntries,
- shard.underlyingActor().getShardMBean().getLastApplied());
-
- shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ testRecovery(listEntryKeys);
}
- private CompositeModificationPayload newPayload(final Modification... mods) {
+ private ModificationPayload newModificationPayload(final Modification... mods) throws IOException {
MutableCompositeModification compMod = new MutableCompositeModification();
for(Modification mod: mods) {
compMod.addModification(mod);
}
- return new CompositeModificationPayload(compMod.toSerializable());
- }
-
- private CompositeModificationByteStringPayload newByteStringPayload(final Modification... mods) {
- MutableCompositeModification compMod = new MutableCompositeModification();
- for(Modification mod: mods) {
- compMod.addModification(mod);
- }
-
- return new CompositeModificationByteStringPayload(compMod.toSerializable());
- }
-
-
- private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(final String cohortName,
- final InMemoryDOMDataStore dataStore, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data,
- final MutableCompositeModification modification) {
- return setupMockWriteTransaction(cohortName, dataStore, path, data, modification, null);
- }
-
- private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(final String cohortName,
- final InMemoryDOMDataStore dataStore, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data,
- final MutableCompositeModification modification,
- final Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit) {
-
- DOMStoreWriteTransaction tx = dataStore.newWriteOnlyTransaction();
- tx.write(path, data);
- final DOMStoreThreePhaseCommitCohort realCohort = tx.ready();
- DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, cohortName);
-
- doAnswer(new Answer<ListenableFuture<Boolean>>() {
- @Override
- public ListenableFuture<Boolean> answer(final InvocationOnMock invocation) {
- return realCohort.canCommit();
- }
- }).when(cohort).canCommit();
-
- doAnswer(new Answer<ListenableFuture<Void>>() {
- @Override
- public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
- if(preCommit != null) {
- return preCommit.apply(realCohort);
- } else {
- return realCohort.preCommit();
- }
- }
- }).when(cohort).preCommit();
-
- doAnswer(new Answer<ListenableFuture<Void>>() {
- @Override
- public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
- return realCohort.commit();
- }
- }).when(cohort).commit();
-
- doAnswer(new Answer<ListenableFuture<Void>>() {
- @Override
- public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
- return realCohort.abort();
- }
- }).when(cohort).abort();
-
- modification.addModification(new WriteModification(path, data, SCHEMA_CONTEXT));
-
- return cohort;
+ return new ModificationPayload(compMod);
}
@SuppressWarnings({ "unchecked" })
waitUntilLeader(shard);
- // Setup 3 simulated transactions with mock cohorts backed by real cohorts.
+ // Setup 3 simulated transactions with mock cohorts backed by real cohorts.
InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
cohort1, modification1, true), getRef());
ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
+ expectMsgClass(duration, ReadyTransactionReply.class));
assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
// Send the CanCommitTransaction message for the first Tx.
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
cohort2, modification2, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID3, CURRENT_VERSION,
cohort3, modification3, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message for the next 2 Tx's. These should get queued and
// processed after the first Tx completes.
assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
assertEquals(TestModel.ID_QNAME.getLocalName() + " value", 1, idLeaf.get().getValue());
- for(int i = 0; i < 20 * 5; i++) {
- long lastLogIndex = shard.underlyingActor().getShardMBean().getLastLogIndex();
- if(lastLogIndex == 2) {
- break;
+ verifyLastApplied(shard, 2);
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }};
+ }
+
+ // Convenience overload: builds a BatchedModifications message with no transaction chain ID.
+ private BatchedModifications newBatchedModifications(String transactionID, YangInstanceIdentifier path,
+ NormalizedNode<?, ?> data, boolean ready) {
+ return newBatchedModifications(transactionID, null, path, data, ready);
+ }
+
+ // Builds a BatchedModifications message (tagged with CURRENT_VERSION) containing a single
+ // WriteModification of 'data' at 'path'. 'ready' marks this as the final batch, which readies
+ // the transaction for the 3-phase commit sequence.
+ private BatchedModifications newBatchedModifications(String transactionID, String transactionChainID,
+ YangInstanceIdentifier path, NormalizedNode<?, ?> data, boolean ready) {
+ BatchedModifications batched = new BatchedModifications(transactionID, CURRENT_VERSION, transactionChainID);
+ batched.addModification(new WriteModification(path, data));
+ batched.setReady(ready);
+ return batched;
+ }
+
+ // Verifies that a write transaction split across several BatchedModifications messages is
+ // accumulated into a single cohort, readied by the final (ready=true) batch, and then driven
+ // through canCommit/preCommit/commit in order; finally checks the committed data in the store.
+ @SuppressWarnings("unchecked")
+ @Test
+ public void testMultipleBatchedModifications() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testMultipleBatchedModifications");
+
+ waitUntilLeader(shard);
+
+ final String transactionID = "tx";
+ FiniteDuration duration = duration("5 seconds");
+
+ // Decorate the real cohort with a delegating mock so the commit phases can be verified
+ // with Mockito's InOrder below. Only the first decorate call creates the mock so the same
+ // instance is reused across all batches of the transaction.
+ final AtomicReference<DOMStoreThreePhaseCommitCohort> mockCohort = new AtomicReference<>();
+ ShardCommitCoordinator.CohortDecorator cohortDecorator = new ShardCommitCoordinator.CohortDecorator() {
+ @Override
+ public DOMStoreThreePhaseCommitCohort decorate(String txID, DOMStoreThreePhaseCommitCohort actual) {
+ if(mockCohort.get() == null) {
+ mockCohort.set(createDelegatingMockCohort("cohort", actual));
+ }
+
+ return mockCohort.get();
}
- Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
- }
+ };
+
+ shard.underlyingActor().getCommitCoordinator().setCohortDecorator(cohortDecorator);
+
+ // Send a BatchedModifications to start a transaction.
+
+ shard.tell(newBatchedModifications(transactionID, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef());
+ expectMsgClass(duration, BatchedModificationsReply.class);
+
+ // Send a couple more BatchedModifications.
+
+ shard.tell(newBatchedModifications(transactionID, TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false), getRef());
+ expectMsgClass(duration, BatchedModificationsReply.class);
+
+ // The final batch is marked ready, so a ReadyTransactionReply (not a
+ // BatchedModificationsReply) is expected.
+ shard.tell(newBatchedModifications(transactionID, YangInstanceIdentifier.builder(
+ TestModel.OUTER_LIST_PATH).nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), true), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.class);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the CommitTransaction message.
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Verify the 3-phase commit methods were invoked in the proper order on the cohort.
+ InOrder inOrder = inOrder(mockCohort.get());
+ inOrder.verify(mockCohort.get()).canCommit();
+ inOrder.verify(mockCohort.get()).preCommit();
+ inOrder.verify(mockCohort.get()).commit();
+
+ // Verify data in the data store.
+
+ NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
+ assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
+ outerList.getValue() instanceof Iterable);
+ Object entry = ((Iterable<Object>)outerList.getValue()).iterator().next();
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode",
+ entry instanceof MapEntryNode);
+ MapEntryNode mapEntry = (MapEntryNode)entry;
+ Optional<DataContainerChild<? extends PathArgument, ?>> idLeaf =
+ mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME));
+ assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
+ assertEquals(TestModel.ID_QNAME.getLocalName() + " value", 1, idLeaf.get().getValue());
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }};
+ }
+
+ // Verifies that a write transaction started via BatchedModifications on a transaction chain
+ // is visible to a subsequent read-only transaction created on the same chain (chained reads
+ // see uncommitted chained writes), and that the write then commits successfully.
+ @Test
+ public void testBatchedModificationsOnTransactionChain() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testBatchedModificationsOnTransactionChain");
+
+ waitUntilLeader(shard);
+
+ String transactionChainID = "txChain";
+ String transactionID1 = "tx1";
+ String transactionID2 = "tx2";
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Send a BatchedModifications to start a chained write transaction and ready it.
- assertEquals("Last log index", 2, shard.underlyingActor().getShardMBean().getLastLogIndex());
+ ContainerNode containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ YangInstanceIdentifier path = TestModel.TEST_PATH;
+ shard.tell(newBatchedModifications(transactionID1, transactionChainID, path,
+ containerNode, true), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.class);
+
+ // Create a read Tx on the same chain.
+
+ shard.tell(new CreateTransaction(transactionID2, TransactionProxy.TransactionType.READ_ONLY.ordinal() ,
+ transactionChainID).toSerializable(), getRef());
+
+ CreateTransactionReply createReply = expectMsgClass(duration("3 seconds"), CreateTransactionReply.class);
+
+ // The chained read should see the as-yet-uncommitted write from tx1.
+ getSystem().actorSelection(createReply.getTransactionActorPath()).tell(new ReadData(path), getRef());
+ ReadDataReply readReply = expectMsgClass(duration("3 seconds"), ReadDataReply.class);
+ assertEquals("Read node", containerNode, readReply.getNormalizedNode());
+
+ // Commit the write transaction.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Verify data in the data store.
+
+ NormalizedNode<?, ?> actualNode = readStore(shard, path);
+ assertEquals("Stored node", containerNode, actualNode);
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}};
}
+ // Verifies that a BatchedModifications message received by a non-leader shard is forwarded,
+ // unchanged, to the leader. The overridden Shard makes isLeader() report false and getLeader()
+ // point at this test's probe actor once the flag is flipped, so the forwarded message can be
+ // observed with expectMsgEquals.
+ @Test
+ public void testOnBatchedModificationsWhenNotLeader() {
+ final AtomicBoolean overrideLeaderCalls = new AtomicBoolean();
+ new ShardTestKit(getSystem()) {{
+ Creator<Shard> creator = new Creator<Shard>() {
+ @Override
+ public Shard create() throws Exception {
+ return new Shard(shardID, Collections.<String,String>emptyMap(),
+ newDatastoreContext(), SCHEMA_CONTEXT) {
+ @Override
+ protected boolean isLeader() {
+ return overrideLeaderCalls.get() ? false : super.isLeader();
+ }
+
+ @Override
+ protected ActorSelection getLeader() {
+ return overrideLeaderCalls.get() ? getSystem().actorSelection(getRef().path()) :
+ super.getLeader();
+ }
+ };
+ }
+ };
+
+ TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ Props.create(new DelegatingShardCreator(creator)), "testOnBatchedModificationsWhenNotLeader");
+
+ // Let the shard become leader first so it is fully initialized before the override kicks in.
+ waitUntilLeader(shard);
+
+ overrideLeaderCalls.set(true);
+
+ BatchedModifications batched = new BatchedModifications("tx", DataStoreVersions.CURRENT_VERSION, "");
+
+ shard.tell(batched, ActorRef.noSender());
+
+ // The "leader" is the test probe, so the same message instance should arrive here.
+ expectMsgEquals(batched);
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }};
+ }
+
+ @Test
+ public void testCommitWithPersistenceDisabled() throws Throwable {
+ dataStoreContextBuilder.persistent(false);
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testCommitWithPersistenceDisabled");
+
+ waitUntilLeader(shard);
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ // Setup a simulated transactions with a mock cohort.
+
+ String transactionID = "tx";
+ MutableCompositeModification modification = new MutableCompositeModification();
+ NormalizedNode<?, ?> containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ DOMStoreThreePhaseCommitCohort cohort = setupMockWriteTransaction("cohort", dataStore,
+ TestModel.TEST_PATH, containerNode, modification);
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.class);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+            // Send the CommitTransaction message.
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ InOrder inOrder = inOrder(cohort);
+ inOrder.verify(cohort).canCommit();
+ inOrder.verify(cohort).preCommit();
+ inOrder.verify(cohort).commit();
+
+ NormalizedNode<?, ?> actualNode = readStore(shard, TestModel.TEST_PATH);
+ assertEquals(TestModel.TEST_QNAME.getLocalName(), containerNode, actualNode);
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }};
+ }
+
+ @Test
+ public void testCommitWhenTransactionHasNoModifications(){
+ // Note that persistence is enabled which would normally result in the entry getting written to the journal
+        // but, since this transaction has no modifications, no journal entry is actually written.
+ new ShardTestKit(getSystem()) {
+ {
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testCommitWhenTransactionHasNoModifications");
+
+ waitUntilLeader(shard);
+
+ String transactionID = "tx1";
+ MutableCompositeModification modification = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit();
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).preCommit();
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).commit();
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.class);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, ThreePhaseCommitCohortMessages.CommitTransactionReply.class);
+
+ InOrder inOrder = inOrder(cohort);
+ inOrder.verify(cohort).canCommit();
+ inOrder.verify(cohort).preCommit();
+ inOrder.verify(cohort).commit();
+
+ // Use MBean for verification
+ // Committed transaction count should increase as usual
+ assertEquals(1,shard.underlyingActor().getShardMBean().getCommittedTransactionsCount());
+
+ // Commit index should not advance because this does not go into the journal
+ assertEquals(-1, shard.underlyingActor().getShardMBean().getCommitIndex());
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+
+ }
+ };
+ }
+
+ @Test
+ public void testCommitWhenTransactionHasModifications(){
+ new ShardTestKit(getSystem()) {
+ {
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testCommitWhenTransactionHasModifications");
+
+ waitUntilLeader(shard);
+
+ String transactionID = "tx1";
+ MutableCompositeModification modification = new MutableCompositeModification();
+ modification.addModification(new DeleteModification(YangInstanceIdentifier.builder().build()));
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit();
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).preCommit();
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).commit();
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.class);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, ThreePhaseCommitCohortMessages.CommitTransactionReply.class);
+
+ InOrder inOrder = inOrder(cohort);
+ inOrder.verify(cohort).canCommit();
+ inOrder.verify(cohort).preCommit();
+ inOrder.verify(cohort).commit();
+
+ // Use MBean for verification
+ // Committed transaction count should increase as usual
+ assertEquals(1, shard.underlyingActor().getShardMBean().getCommittedTransactionsCount());
+
+ // Commit index should advance as we do not have an empty modification
+ assertEquals(0, shard.underlyingActor().getShardMBean().getCommitIndex());
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+
+ }
+ };
+ }
+
@Test
public void testCommitPhaseFailure() throws Throwable {
new ShardTestKit(getSystem()) {{
waitUntilLeader(shard);
- // Setup 2 simulated transactions with mock cohorts. The first one fails in the
+ // Setup 2 simulated transactions with mock cohorts. The first one fails in the
// commit phase.
String transactionID1 = "tx1";
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
cohort1, modification1, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
cohort2, modification2, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message for the first Tx.
shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
cohort, modification, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message.
shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
cohort, modification, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message.
waitUntilLeader(shard);
final FiniteDuration duration = duration("5 seconds");
- final Timeout timeout = new Timeout(duration);
-
InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
final String transactionID = "tx1";
- final CountDownLatch abortComplete = new CountDownLatch(1);
Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit =
new Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>>() {
@Override
public ListenableFuture<Void> apply(final DOMStoreThreePhaseCommitCohort cohort) {
ListenableFuture<Void> preCommitFuture = cohort.preCommit();
- Future<Object> abortFuture = Patterns.ask(shard,
- new AbortTransaction(transactionID).toSerializable(), timeout);
- abortFuture.onComplete(new OnComplete<Object>() {
- @Override
- public void onComplete(final Throwable e, final Object resp) {
- abortComplete.countDown();
- }
- }, getSystem().dispatcher());
+ // Simulate an AbortTransaction message occurring during replication, after
+ // persisting and before finishing the commit to the in-memory store.
+ // We have no followers so due to optimizations in the RaftActor, it does not
+ // attempt replication and thus we can't send an AbortTransaction message b/c
+ // it would be processed too late after CommitTransaction completes. So we'll
+ // simulate an AbortTransaction message occurring during replication by calling
+ // the shard directly.
+ //
+ shard.underlyingActor().doAbortTransaction(transactionID, null);
return preCommitFuture;
}
shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
cohort, modification, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
assertEquals("Can commit", true, canCommitReply.getCanCommit());
- Future<Object> commitFuture = Patterns.ask(shard,
- new CommitTransaction(transactionID).toSerializable(), timeout);
-
- assertEquals("Abort complete", true, abortComplete.await(5, TimeUnit.SECONDS));
-
- Await.result(commitFuture, duration);
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
NormalizedNode<?, ?> node = readStore(shard, TestModel.TEST_PATH);
+
+            // Since we're simulating an abort occurring during replication and before finishing the commit,
+ // the data should still get written to the in-memory store since we've gotten past
+ // canCommit and preCommit and persisted the data.
assertNotNull(TestModel.TEST_QNAME.getLocalName() + " not found", node);
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
@Test
public void testTransactionCommitTimeout() throws Throwable {
- dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitTimeoutInSeconds(1).build();
+ dataStoreContextBuilder.shardTransactionCommitTimeoutInSeconds(1);
new ShardTestKit(getSystem()) {{
final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
cohort1, modification1, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
cohort2, modification2, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// canCommit 1st Tx. We don't send the commit so it should timeout.
@Test
public void testTransactionCommitQueueCapacityExceeded() throws Throwable {
- dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitQueueCapacity(1).build();
+ dataStoreContextBuilder.shardTransactionCommitQueueCapacity(1);
new ShardTestKit(getSystem()) {{
final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
cohort1, modification1, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
cohort2, modification2, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID3, CURRENT_VERSION,
cohort3, modification3, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// canCommit 1st Tx.
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
cohort1, modification1, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
cohort2, modification2, true), getRef());
- expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message for the first Tx.
// Wait for the 2nd Tx to complete the canCommit phase.
- final CountDownLatch latch = new CountDownLatch(1);
- canCommitFuture.onComplete(new OnComplete<Object>() {
- @Override
- public void onComplete(final Throwable t, final Object resp) {
- latch.countDown();
- }
- }, getSystem().dispatcher());
-
- assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS));
+ Await.ready(canCommitFuture, duration);
InOrder inOrder = inOrder(cohort1, cohort2);
inOrder.verify(cohort1).canCommit();
}
@Test
- public void testCreateSnapshot() throws IOException, InterruptedException {
- testCreateSnapshot(true, "testCreateSnapshot");
+ public void testCreateSnapshot() throws Exception {
+ testCreateSnapshot(true, "testCreateSnapshot");
}
@Test
- public void testCreateSnapshotWithNonPersistentData() throws IOException, InterruptedException {
+ public void testCreateSnapshotWithNonPersistentData() throws Exception {
testCreateSnapshot(false, "testCreateSnapshotWithNonPersistentData");
}
@SuppressWarnings("serial")
- public void testCreateSnapshot(final boolean persistent, final String shardActorName) throws IOException, InterruptedException {
- final DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
- shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(persistent).build();
+ public void testCreateSnapshot(final boolean persistent, final String shardActorName) throws Exception{
+
+ final AtomicReference<Object> savedSnapshot = new AtomicReference<>();
+ class TestPersistentDataProvider extends DelegatingPersistentDataProvider {
+ TestPersistentDataProvider(DataPersistenceProvider delegate) {
+ super(delegate);
+ }
+
+ @Override
+ public void saveSnapshot(Object o) {
+ savedSnapshot.set(o);
+ super.saveSnapshot(o);
+ }
+ }
+
+ dataStoreContextBuilder.persistent(persistent);
new ShardTestKit(getSystem()) {{
final AtomicReference<CountDownLatch> latch = new AtomicReference<>(new CountDownLatch(1));
+
+ class TestShard extends Shard {
+
+ protected TestShard(ShardIdentifier name, Map<String, String> peerAddresses,
+ DatastoreContext datastoreContext, SchemaContext schemaContext) {
+ super(name, peerAddresses, datastoreContext, schemaContext);
+ setPersistence(new TestPersistentDataProvider(super.persistence()));
+ }
+
+ @Override
+ protected void commitSnapshot(final long sequenceNumber) {
+ super.commitSnapshot(sequenceNumber);
+ latch.get().countDown();
+ }
+
+ @Override
+ public RaftActorContext getRaftActorContext() {
+ return super.getRaftActorContext();
+ }
+ }
+
Creator<Shard> creator = new Creator<Shard>() {
@Override
public Shard create() throws Exception {
- return new Shard(shardID, Collections.<ShardIdentifier,String>emptyMap(),
- dataStoreContext, SCHEMA_CONTEXT) {
- @Override
- protected void commitSnapshot(final long sequenceNumber) {
- super.commitSnapshot(sequenceNumber);
- latch.get().countDown();
- }
- };
+ return new TestShard(shardID, Collections.<String,String>emptyMap(),
+ newDatastoreContext(), SCHEMA_CONTEXT);
}
};
waitUntilLeader(shard);
- shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+
+ NormalizedNode<?,?> expectedRoot = readStore(shard, YangInstanceIdentifier.builder().build());
+
+            // Trigger creation of a snapshot directly via the RaftActorContext's SnapshotManager.
+ RaftActorContext raftActorContext = ((TestShard) shard.underlyingActor()).getRaftActorContext();
+ raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1);
assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
+ assertTrue("Invalid saved snapshot " + savedSnapshot.get(),
+ savedSnapshot.get() instanceof Snapshot);
+
+ verifySnapshot((Snapshot)savedSnapshot.get(), expectedRoot);
+
latch.set(new CountDownLatch(1));
- shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ savedSnapshot.set(null);
+
+ raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1);
assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
+ assertTrue("Invalid saved snapshot " + savedSnapshot.get(),
+ savedSnapshot.get() instanceof Snapshot);
+
+ verifySnapshot((Snapshot)savedSnapshot.get(), expectedRoot);
+
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }
+
+ private void verifySnapshot(Snapshot snapshot, NormalizedNode<?,?> expectedRoot) {
+
+ NormalizedNode<?, ?> actual = SerializationUtils.deserializeNormalizedNode(snapshot.getState());
+ assertEquals("Root node", expectedRoot, actual);
+
}};
}
final DatastoreContext persistentContext = DatastoreContext.newBuilder().
shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(true).build();
- final Props persistentProps = Shard.props(shardID, Collections.<ShardIdentifier, String>emptyMap(),
+ final Props persistentProps = Shard.props(shardID, Collections.<String, String>emptyMap(),
persistentContext, SCHEMA_CONTEXT);
final DatastoreContext nonPersistentContext = DatastoreContext.newBuilder().
shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(false).build();
- final Props nonPersistentProps = Shard.props(shardID, Collections.<ShardIdentifier, String>emptyMap(),
+ final Props nonPersistentProps = Shard.props(shardID, Collections.<String, String>emptyMap(),
nonPersistentContext, SCHEMA_CONTEXT);
new ShardTestKit(getSystem()) {{
TestActorRef<Shard> shard1 = TestActorRef.create(getSystem(),
persistentProps, "testPersistence1");
- assertTrue("Recovery Applicable", shard1.underlyingActor().getDataPersistenceProvider().isRecoveryApplicable());
+ assertTrue("Recovery Applicable", shard1.underlyingActor().persistence().isRecoveryApplicable());
shard1.tell(PoisonPill.getInstance(), ActorRef.noSender());
TestActorRef<Shard> shard2 = TestActorRef.create(getSystem(),
nonPersistentProps, "testPersistence2");
- assertFalse("Recovery Not Applicable", shard2.underlyingActor().getDataPersistenceProvider().isRecoveryApplicable());
+ assertFalse("Recovery Not Applicable", shard2.underlyingActor().persistence().isRecoveryApplicable());
shard2.tell(PoisonPill.getInstance(), ActorRef.noSender());
}
+ @Test
+ public void testOnDatastoreContext() {
+ new ShardTestKit(getSystem()) {{
+ dataStoreContextBuilder.persistent(true);
- private NormalizedNode<?, ?> readStore(final InMemoryDOMDataStore store) throws ReadFailedException {
- DOMStoreReadTransaction transaction = store.newReadOnlyTransaction();
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read =
- transaction.read(YangInstanceIdentifier.builder().build());
+ TestActorRef<Shard> shard = TestActorRef.create(getSystem(), newShardProps(), "testOnDatastoreContext");
- Optional<NormalizedNode<?, ?>> optional = read.checkedGet();
+ assertEquals("isRecoveryApplicable", true,
+ shard.underlyingActor().persistence().isRecoveryApplicable());
- NormalizedNode<?, ?> normalizedNode = optional.get();
+ waitUntilLeader(shard);
- transaction.close();
+ shard.tell(dataStoreContextBuilder.persistent(false).build(), ActorRef.noSender());
- return normalizedNode;
- }
+ assertEquals("isRecoveryApplicable", false,
+ shard.underlyingActor().persistence().isRecoveryApplicable());
- private void commitTransaction(final DOMStoreWriteTransaction transaction) {
- DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
- ListenableFuture<Void> future =
- commitCohort.preCommit();
- try {
- future.get();
- future = commitCohort.commit();
- future.get();
- } catch (InterruptedException | ExecutionException e) {
- }
- }
+ shard.tell(dataStoreContextBuilder.persistent(true).build(), ActorRef.noSender());
- private AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> noOpDataChangeListener() {
- return new AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>() {
- @Override
- public void onDataChanged(
- final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
+ assertEquals("isRecoveryApplicable", true,
+ shard.underlyingActor().persistence().isRecoveryApplicable());
- }
- };
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }};
}
- static NormalizedNode<?,?> readStore(final TestActorRef<Shard> shard, final YangInstanceIdentifier id)
- throws ExecutionException, InterruptedException {
- DOMStoreReadTransaction transaction = shard.underlyingActor().getDataStore().newReadOnlyTransaction();
+ @Test
+ public void testRegisterRoleChangeListener() throws Exception {
+ new ShardTestKit(getSystem()) {
+ {
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testRegisterRoleChangeListener");
+
+ waitUntilLeader(shard);
+
+ TestActorRef<MessageCollectorActor> listener =
+ TestActorRef.create(getSystem(), Props.create(MessageCollectorActor.class));
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> future =
- transaction.read(id);
+ shard.tell(new RegisterRoleChangeListener(), listener);
- Optional<NormalizedNode<?, ?>> optional = future.get();
- NormalizedNode<?, ?> node = optional.isPresent()? optional.get() : null;
+ // TODO: MessageCollectorActor exists as a test util in both the akka-raft and distributed-datastore
+ // projects. Need to move it to commons as a regular utility and then we can get rid of this arbitrary
+ // sleep.
+ Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
- transaction.close();
+ List<Object> allMatching = MessageCollectorActor.getAllMatching(listener, RegisterRoleChangeListenerReply.class);
- return node;
+ assertEquals(1, allMatching.size());
+ }
+ };
}
- private void writeToStore(final TestActorRef<Shard> shard, final YangInstanceIdentifier id, final NormalizedNode<?,?> node)
- throws ExecutionException, InterruptedException {
- DOMStoreWriteTransaction transaction = shard.underlyingActor().getDataStore().newWriteOnlyTransaction();
+ @Test
+ public void testFollowerInitialSyncStatus() throws Exception {
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ "testFollowerInitialSyncStatus");
- transaction.write(id, node);
+ shard.underlyingActor().onReceiveCommand(new FollowerInitialSyncUpStatus(false, "member-1-shard-inventory-operational"));
- DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
- commitCohort.preCommit().get();
- commitCohort.commit().get();
- }
+ assertEquals(false, shard.underlyingActor().getShardMBean().getFollowerInitialSyncStatus());
- @SuppressWarnings("serial")
- private static final class DelegatingShardCreator implements Creator<Shard> {
- private final Creator<Shard> delegate;
+ shard.underlyingActor().onReceiveCommand(new FollowerInitialSyncUpStatus(true, "member-1-shard-inventory-operational"));
- DelegatingShardCreator(final Creator<Shard> delegate) {
- this.delegate = delegate;
- }
+ assertEquals(true, shard.underlyingActor().getShardMBean().getFollowerInitialSyncStatus());
+
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }
- @Override
- public Shard create() throws Exception {
- return delegate.create();
+ private void commitTransaction(final DOMStoreWriteTransaction transaction) {
+ DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
+ ListenableFuture<Void> future =
+ commitCohort.preCommit();
+ try {
+ future.get();
+ future = commitCohort.commit();
+ future.get();
+ } catch (InterruptedException | ExecutionException e) {
}
}
}