X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=opendaylight%2Fmd-sal%2Fsal-distributed-datastore%2Fsrc%2Ftest%2Fjava%2Forg%2Fopendaylight%2Fcontroller%2Fcluster%2Fdatastore%2FShardTest.java;h=14fc3a12bd9b97bf891586eb3902c502fa18e142;hb=29aa3b83db308b1a511857db8d918e0f2e629407;hp=f183bb319ee04df852134264ec66a475a53c13ff;hpb=531621aac4cff9d39cbd8668a53bdeba8a0e6d81;p=controller.git

diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java
index f183bb319e..b1e31380a4 100644
--- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java
+++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java
@@ -1,1162 +1,2374 @@
+/*
+ * Copyright (c) 2014, 2015 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
 package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.mock;
+import static org.opendaylight.controller.cluster.datastore.DataStoreVersions.CURRENT_VERSION;
+
 import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
 import akka.actor.Props;
+import akka.actor.Status.Failure;
 import akka.dispatch.Dispatchers;
 import akka.dispatch.OnComplete;
 import akka.japi.Creator;
 import akka.pattern.Patterns;
-import akka.testkit.JavaTestKit;
+import akka.persistence.SaveSnapshotSuccess;
 import akka.testkit.TestActorRef;
 import akka.util.Timeout;
-import com.google.common.base.Function;
-import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.MoreExecutors;
-import org.junit.After;
-import org.junit.Before;
+import com.google.common.base.Stopwatch;
+import com.google.common.base.Throwables;
+import com.google.common.util.concurrent.Uninterruptibles;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
 import org.junit.Test;
 import org.mockito.InOrder;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
+import org.opendaylight.controller.cluster.DataPersistenceProvider;
+import org.opendaylight.controller.cluster.DelegatingPersistentDataProvider;
+import org.opendaylight.controller.cluster.access.concepts.LocalHistoryIdentifier;
+import org.opendaylight.controller.cluster.access.concepts.MemberName;
+import org.opendaylight.controller.cluster.access.concepts.TransactionIdentifier;
+import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
 import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.BatchedModifications;
+import org.opendaylight.controller.cluster.datastore.messages.BatchedModificationsReply;
 import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
+import org.opendaylight.controller.cluster.datastore.messages.ReadData;
+import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyLocalTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
+import org.opendaylight.controller.cluster.datastore.messages.RegisterDataTreeChangeListener;
+import org.opendaylight.controller.cluster.datastore.messages.RegisterDataTreeChangeListenerReply;
+import org.opendaylight.controller.cluster.datastore.messages.ShardLeaderStateChanged;
 import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
 import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
-import org.opendaylight.controller.cluster.datastore.modification.Modification;
 import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
 import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
-import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InMemoryJournal;
-import org.opendaylight.controller.cluster.datastore.utils.InMemorySnapshotStore;
+import org.opendaylight.controller.cluster.datastore.persisted.MetadataShardDataTreeSnapshot;
+import org.opendaylight.controller.cluster.datastore.persisted.ShardDataTreeSnapshot;
+import org.opendaylight.controller.cluster.datastore.utils.MockDataChangeListener;
+import org.opendaylight.controller.cluster.datastore.utils.MockDataTreeChangeListener;
+import org.opendaylight.controller.cluster.notifications.RegisterRoleChangeListener;
+import org.opendaylight.controller.cluster.notifications.RegisterRoleChangeListenerReply;
+import org.opendaylight.controller.cluster.raft.RaftActorContext;
 import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry;
-import org.opendaylight.controller.cluster.raft.ReplicatedLogImplEntry;
 import org.opendaylight.controller.cluster.raft.Snapshot;
-import org.opendaylight.controller.cluster.raft.base.messages.ApplyLogEntries;
 import org.opendaylight.controller.cluster.raft.base.messages.ApplySnapshot;
 import org.opendaylight.controller.cluster.raft.base.messages.ApplyState;
-import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot;
-import org.opendaylight.controller.cluster.raft.protobuff.client.messages.CompositeModificationPayload;
+import org.opendaylight.controller.cluster.raft.base.messages.ElectionTimeout;
+import org.opendaylight.controller.cluster.raft.base.messages.FollowerInitialSyncUpStatus;
+import org.opendaylight.controller.cluster.raft.base.messages.TimeoutNow;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeader;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeaderReply;
+import org.opendaylight.controller.cluster.raft.client.messages.GetOnDemandRaftState;
+import org.opendaylight.controller.cluster.raft.client.messages.OnDemandRaftState;
+import org.opendaylight.controller.cluster.raft.messages.RequestVote;
+import org.opendaylight.controller.cluster.raft.messages.ServerRemoved;
+import org.opendaylight.controller.cluster.raft.persisted.ApplyJournalEntries;
+import org.opendaylight.controller.cluster.raft.persisted.SimpleReplicatedLogEntry;
+import org.opendaylight.controller.cluster.raft.policy.DisableElectionsRaftPolicy;
 import org.opendaylight.controller.cluster.raft.protobuff.client.messages.Payload;
+import org.opendaylight.controller.cluster.raft.utils.InMemoryJournal;
+import org.opendaylight.controller.cluster.raft.utils.MessageCollectorActor;
 import org.opendaylight.controller.md.cluster.datastore.model.SchemaContextHelper;
 import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
 import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
 import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.CreateTransactionReply;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
-import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
+import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTree;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeCandidate;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeModification;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataValidationFailedException;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.TipProducingDataTree;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.TreeType;
 import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
+import org.opendaylight.yangtools.yang.data.impl.schema.tree.InMemoryDataTreeFactory;
 import scala.concurrent.Await;
 import scala.concurrent.Future;
 import scala.concurrent.duration.FiniteDuration;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.inOrder;
-public class ShardTest extends AbstractActorTest {
+public class ShardTest extends AbstractShardTest {
+    private static final String DUMMY_DATA = "Dummy data as snapshot sequence number is set to 0 in "
+            + "InMemorySnapshotStore and journal recovery seq number will start from 1";
-    private static final SchemaContext SCHEMA_CONTEXT = TestModel.createTestContext();
+    @Test
+    public void testRegisterChangeListener() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testRegisterChangeListener");
-    private static final ShardIdentifier IDENTIFIER = ShardIdentifier.builder().memberName("member-1")
-        .shardName("inventory").type("config").build();
+                waitUntilLeader(shard);
-    private static final AtomicInteger NEXT_SHARD_NUM = new AtomicInteger();
+                shard.tell(new UpdateSchemaContext(SchemaContextHelper.full()), ActorRef.noSender());
-    private static String shardName() {
-        return "shard" + NEXT_SHARD_NUM.getAndIncrement();
-    }
+                final MockDataChangeListener listener = new MockDataChangeListener(1);
+                final ActorRef dclActor = actorFactory.createActor(DataChangeListener.props(listener),
+                        "testRegisterChangeListener-DataChangeListener");
-    private DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
-            shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).build();
+                shard.tell(new RegisterChangeListener(TestModel.TEST_PATH, dclActor,
+                        AsyncDataBroker.DataChangeScope.BASE, true), getRef());
-    @Before
-    public void setUp() {
-        System.setProperty("shard.persistent", "false");
+                final RegisterChangeListenerReply reply = expectMsgClass(duration("3 seconds"),
+                        RegisterChangeListenerReply.class);
+                final String replyPath = reply.getListenerRegistrationPath().toString();
+                assertTrue("Incorrect reply path: " + replyPath,
+                        replyPath.matches("akka:\\/\\/test\\/user\\/testRegisterChangeListener\\/\\$.*"));
-        InMemorySnapshotStore.clear();
-        InMemoryJournal.clear();
-    }
+                final YangInstanceIdentifier path = TestModel.TEST_PATH;
+                writeToStore(shard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
-    @After
-    public void tearDown() {
-        InMemorySnapshotStore.clear();
-        InMemoryJournal.clear();
+                listener.waitForChangeEvents(path);
+            }
+        };
     }
-    private Props newShardProps() {
-        return Shard.props(IDENTIFIER, Collections.emptyMap(),
-                dataStoreContext, SCHEMA_CONTEXT);
+    @SuppressWarnings("serial")
+    @Test
+    public void testChangeListenerNotifiedWhenNotTheLeaderOnRegistration() throws Exception {
+        // This test tests the timing window in which a change listener is registered before the
+        // shard becomes the leader. We verify that the listener is registered and notified of the
+        // existing data when the shard becomes the leader.
+        // For this test, we want to send the RegisterChangeListener message after the shard
+        // has recovered from persistence and before it becomes the leader. So we subclass
+        // Shard to override onReceiveCommand and, when the first ElectionTimeout is received,
+        // we know that the shard has been initialized to a follower and has started the
+        // election process. The following 2 CountDownLatches are used to coordinate the
+        // ElectionTimeout with the sending of the RegisterChangeListener message.
+        final CountDownLatch onFirstElectionTimeout = new CountDownLatch(1);
+        final CountDownLatch onChangeListenerRegistered = new CountDownLatch(1);
+        final Creator creator = new Creator() {
+            boolean firstElectionTimeout = true;
+
+            @Override
+            public Shard create() throws Exception {
+                // Use a non persistent provider because this test actually invokes persist on the journal
+                // this will cause all other messages to not be queued properly after that.
+                // The basic issue is that you cannot use TestActorRef with a persistent actor (at least when
+                // it does do a persist)
+                return new Shard(newShardBuilder()) {
+                    @Override
+                    public void handleCommand(final Object message) {
+                        if (message instanceof ElectionTimeout && firstElectionTimeout) {
+                            // Got the first ElectionTimeout. We don't forward it to the
+                            // base Shard yet until we've sent the RegisterChangeListener
+                            // message. So we signal the onFirstElectionTimeout latch to tell
+                            // the main thread to send the RegisterChangeListener message and
+                            // start a thread to wait on the onChangeListenerRegistered latch,
+                            // which the main thread signals after it has sent the message.
+                            // After the onChangeListenerRegistered is triggered, we send the
+                            // original ElectionTimeout message to proceed with the election.
+                            firstElectionTimeout = false;
+                            final ActorRef self = getSelf();
+                            new Thread() {
+                                @Override
+                                public void run() {
+                                    Uninterruptibles.awaitUninterruptibly(
+                                            onChangeListenerRegistered, 5, TimeUnit.SECONDS);
+                                    self.tell(message, self);
+                                }
+                            }.start();
+
+                            onFirstElectionTimeout.countDown();
+                        } else {
+                            super.handleCommand(message);
+                        }
+                    }
+                };
+            }
+        };
+
+        setupInMemorySnapshotStore();
+
+        final MockDataChangeListener listener = new MockDataChangeListener(1);
+        final ActorRef dclActor = actorFactory.createActor(DataChangeListener.props(listener),
+                "testRegisterChangeListenerWhenNotLeaderInitially-DataChangeListener");
+
+        final TestActorRef shard = actorFactory.createTestActor(
+                Props.create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()),
+                "testRegisterChangeListenerWhenNotLeaderInitially");
+
+        new ShardTestKit(getSystem()) {
+            {
+                final YangInstanceIdentifier path = TestModel.TEST_PATH;
+
+                // Wait until the shard receives the first ElectionTimeout
+                // message.
+                assertEquals("Got first ElectionTimeout", true, onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
+
+                // Now send the RegisterChangeListener and wait for the reply.
+                shard.tell(new RegisterChangeListener(path, dclActor, AsyncDataBroker.DataChangeScope.SUBTREE, false),
+                        getRef());
+
+                final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+                        RegisterChangeListenerReply.class);
+                assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath());
+
+                // Sanity check - verify the shard is not the leader yet.
+                shard.tell(FindLeader.INSTANCE, getRef());
+                final FindLeaderReply findLeadeReply = expectMsgClass(duration("5 seconds"), FindLeaderReply.class);
+                assertFalse("Expected the shard not to be the leader", findLeadeReply.getLeaderActor().isPresent());
+
+                // Signal the onChangeListenerRegistered latch to tell the
+                // thread above to proceed
+                // with the election process.
+                onChangeListenerRegistered.countDown();
+
+                // Wait for the shard to become the leader and notify our
+                // listener with the existing
+                // data in the store.
+                listener.waitForChangeEvents(path);
+            }
+        };
     }
     @Test
-    public void testOnReceiveRegisterListener() throws Exception {
-        new JavaTestKit(getSystem()) {{
-            ActorRef subject = getSystem().actorOf(newShardProps(), "testRegisterChangeListener");
+    public void testRegisterDataTreeChangeListener() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testRegisterDataTreeChangeListener");
+
+                waitUntilLeader(shard);
+
+                shard.tell(new UpdateSchemaContext(SchemaContextHelper.full()), ActorRef.noSender());
+
+                final MockDataTreeChangeListener listener = new MockDataTreeChangeListener(1);
+                final ActorRef dclActor = actorFactory.createActor(DataTreeChangeListenerActor.props(listener),
+                        "testRegisterDataTreeChangeListener-DataTreeChangeListener");
-            subject.tell(new UpdateSchemaContext(SchemaContextHelper.full()), getRef());
+                shard.tell(new RegisterDataTreeChangeListener(TestModel.TEST_PATH, dclActor, false), getRef());
-            subject.tell(new RegisterChangeListener(TestModel.TEST_PATH,
-                    getRef().path(), AsyncDataBroker.DataChangeScope.BASE), getRef());
+                final RegisterDataTreeChangeListenerReply reply = expectMsgClass(duration("3 seconds"),
+                        RegisterDataTreeChangeListenerReply.class);
+                final String replyPath = reply.getListenerRegistrationPath().toString();
+                assertTrue("Incorrect reply path: " + replyPath,
+                        replyPath.matches("akka:\\/\\/test\\/user\\/testRegisterDataTreeChangeListener\\/\\$.*"));
-            EnableNotification enable = expectMsgClass(duration("3 seconds"), EnableNotification.class);
-            assertEquals("isEnabled", false, enable.isEnabled());
+                final YangInstanceIdentifier path = TestModel.TEST_PATH;
+                writeToStore(shard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
-            RegisterChangeListenerReply reply = expectMsgClass(duration("3 seconds"),
-                    RegisterChangeListenerReply.class);
-            assertTrue(reply.getListenerRegistrationPath().toString().matches(
-                    "akka:\\/\\/test\\/user\\/testRegisterChangeListener\\/\\$.*"));
-        }};
+                listener.waitForChangeEvents();
+            }
+        };
     }
+    @SuppressWarnings("serial")
     @Test
-    public void testCreateTransaction(){
-        new ShardTestKit(getSystem()) {{
-            ActorRef subject = getSystem().actorOf(newShardProps(), "testCreateTransaction");
+    public void testDataTreeChangeListenerNotifiedWhenNotTheLeaderOnRegistration() throws Exception {
+        final CountDownLatch onFirstElectionTimeout = new CountDownLatch(1);
+        final CountDownLatch onChangeListenerRegistered = new CountDownLatch(1);
+        final Creator creator = new Creator() {
+            boolean firstElectionTimeout = true;
+
+            @Override
+            public Shard create() throws Exception {
+                return new Shard(newShardBuilder()) {
+                    @Override
+                    public void handleCommand(final Object message) {
+                        if (message instanceof ElectionTimeout && firstElectionTimeout) {
+                            firstElectionTimeout = false;
+                            final ActorRef self = getSelf();
+                            new Thread() {
+                                @Override
+                                public void run() {
+                                    Uninterruptibles.awaitUninterruptibly(
+                                            onChangeListenerRegistered, 5, TimeUnit.SECONDS);
+                                    self.tell(message, self);
+                                }
+                            }.start();
+
+                            onFirstElectionTimeout.countDown();
+                        } else {
+                            super.handleCommand(message);
+                        }
+                    }
+                };
+            }
+        };
-            waitUntilLeader(subject);
+        setupInMemorySnapshotStore();
-            subject.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+        final MockDataTreeChangeListener listener = new MockDataTreeChangeListener(1);
+        final ActorRef dclActor = actorFactory.createActor(DataTreeChangeListenerActor.props(listener),
+                "testDataTreeChangeListenerNotifiedWhenNotTheLeaderOnRegistration-DataChangeListener");
-            subject.tell(new CreateTransaction("txn-1",
-                    TransactionProxy.TransactionType.READ_ONLY.ordinal() ).toSerializable(), getRef());
+        final TestActorRef shard = actorFactory.createTestActor(
+                Props.create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()),
+                "testDataTreeChangeListenerNotifiedWhenNotTheLeaderOnRegistration");
-            CreateTransactionReply reply = expectMsgClass(duration("3 seconds"),
-                    CreateTransactionReply.class);
+        final YangInstanceIdentifier path = TestModel.TEST_PATH;
-            String path = reply.getTransactionActorPath().toString();
-            assertTrue("Unexpected transaction path " + path,
-                    path.contains("akka://test/user/testCreateTransaction/shard-txn-1"));
-            expectNoMsg();
-        }};
+        new ShardTestKit(getSystem()) {
+            {
+                assertEquals("Got first ElectionTimeout", true, onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
+
+                shard.tell(new RegisterDataTreeChangeListener(path, dclActor, false), getRef());
+                final RegisterDataTreeChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+                        RegisterDataTreeChangeListenerReply.class);
+                assertNotNull("getListenerRegistratioznPath", reply.getListenerRegistrationPath());
+
+                shard.tell(FindLeader.INSTANCE, getRef());
+                final FindLeaderReply findLeadeReply = expectMsgClass(duration("5 seconds"), FindLeaderReply.class);
+                assertFalse("Expected the shard not to be the leader", findLeadeReply.getLeaderActor().isPresent());
+
+                onChangeListenerRegistered.countDown();
+
+                // TODO: investigate why we do not receive data chage events
+                listener.waitForChangeEvents();
+            }
+        };
     }
     @Test
-    public void testCreateTransactionOnChain(){
-        new ShardTestKit(getSystem()) {{
-            final ActorRef subject = getSystem().actorOf(newShardProps(), "testCreateTransactionOnChain");
+    public void testCreateTransaction() {
+        new ShardTestKit(getSystem()) {
+            {
+                final ActorRef shard = actorFactory.createActor(newShardProps(), "testCreateTransaction");
-            waitUntilLeader(subject);
+                waitUntilLeader(shard);
-            subject.tell(new CreateTransaction("txn-1",
-                    TransactionProxy.TransactionType.READ_ONLY.ordinal() , "foobar").toSerializable(),
-                    getRef());
+                shard.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
-            CreateTransactionReply reply = expectMsgClass(duration("3 seconds"),
-                    CreateTransactionReply.class);
+                shard.tell(new CreateTransaction(nextTransactionId(), TransactionType.READ_ONLY.ordinal(),
+                        DataStoreVersions.CURRENT_VERSION).toSerializable(), getRef());
-            String path = reply.getTransactionActorPath().toString();
-            assertTrue("Unexpected transaction path " + path,
-                    path.contains("akka://test/user/testCreateTransactionOnChain/shard-txn-1"));
-            expectNoMsg();
-        }};
+                final CreateTransactionReply reply = expectMsgClass(duration("3 seconds"),
+                        CreateTransactionReply.class);
+
+                final String path = reply.getTransactionPath().toString();
+                assertTrue("Unexpected transaction path " + path, path
+                        .startsWith("akka://test/user/testCreateTransaction/shard-member-1:ShardTransactionTest@0:"));
+            }
+        };
     }
     @Test
-    public void testPeerAddressResolved(){
-        new JavaTestKit(getSystem()) {{
-            final ShardIdentifier identifier =
-                ShardIdentifier.builder().memberName("member-1")
-                    .shardName("inventory").type("config").build();
-
-            Props props = Shard.props(identifier,
-                    Collections.singletonMap(identifier, null),
-                    dataStoreContext, SCHEMA_CONTEXT);
-            final ActorRef subject = getSystem().actorOf(props, "testPeerAddressResolved");
-
-            new Within(duration("3 seconds")) {
-                @Override
-                protected void run() {
-
-                    subject.tell(
-                        new PeerAddressResolved(identifier, "akka://foobar"),
-                        getRef());
+    public void testCreateTransactionOnChain() {
+        new ShardTestKit(getSystem()) {
+            {
+                final ActorRef shard = actorFactory.createActor(newShardProps(), "testCreateTransactionOnChain");
-                    expectNoMsg();
-                }
-            };
-        }};
+                waitUntilLeader(shard);
+
+                shard.tell(new CreateTransaction(nextTransactionId(), TransactionType.READ_ONLY.ordinal(),
+                        DataStoreVersions.CURRENT_VERSION).toSerializable(), getRef());
+
+                final CreateTransactionReply reply = expectMsgClass(duration("3 seconds"),
+                        CreateTransactionReply.class);
+
+                final String path = reply.getTransactionPath().toString();
+                assertTrue("Unexpected transaction path " + path, path.startsWith(
+                        "akka://test/user/testCreateTransactionOnChain/shard-member-1:ShardTransactionTest@0:"));
+            }
+        };
     }
     @Test
-    public void testApplySnapshot() throws ExecutionException, InterruptedException {
-        TestActorRef ref = TestActorRef.create(getSystem(), newShardProps());
+    public void testPeerAddressResolved() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final ShardIdentifier peerID = ShardIdentifier.create("inventory", MemberName.forName("member-2"),
+                        "config");
+                final TestActorRef shard = actorFactory.createTestActor(newShardBuilder()
+                        .peerAddresses(Collections.singletonMap(peerID.toString(), null))
+                        .props().withDispatcher(Dispatchers.DefaultDispatcherId()), "testPeerAddressResolved");
+
+                final String address = "akka://foobar";
+                shard.tell(new PeerAddressResolved(peerID.toString(), address), ActorRef.noSender());
+
+                shard.tell(GetOnDemandRaftState.INSTANCE, getRef());
+                final OnDemandRaftState state = expectMsgClass(OnDemandRaftState.class);
+                assertEquals("getPeerAddress", address, state.getPeerAddresses().get(peerID.toString()));
+            }
+        };
+    }
+
+    @Test
+    public void testApplySnapshot() throws Exception {
-        NormalizedNodeToNodeCodec codec =
-            new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT);
+        final TestActorRef shard = actorFactory.createTestActor(newShardProps()
+                .withDispatcher(Dispatchers.DefaultDispatcherId()), "testApplySnapshot");
-        writeToStore(ref, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+        ShardTestKit.waitUntilLeader(shard);
-        YangInstanceIdentifier root = YangInstanceIdentifier.builder().build();
-        NormalizedNode expected = readStore(ref, root);
+        final DataTree store = InMemoryDataTreeFactory.getInstance().create(TreeType.OPERATIONAL);
+        store.setSchemaContext(SCHEMA_CONTEXT);
-        NormalizedNodeMessages.Container encode = codec.encode(expected);
+        final ContainerNode container = ImmutableContainerNodeBuilder.create().withNodeIdentifier(
+                new YangInstanceIdentifier.NodeIdentifier(TestModel.TEST_QNAME))
+                .withChild(ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).addChild(
+                        ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1)).build()).build();
-        ApplySnapshot applySnapshot = new ApplySnapshot(Snapshot.create(
-            encode.getNormalizedNode().toByteString().toByteArray(),
-            Collections.emptyList(), 1, 2, 3, 4));
+        writeToStore(store, TestModel.TEST_PATH, container);
-        ref.underlyingActor().onReceiveCommand(applySnapshot);
+        final YangInstanceIdentifier root = YangInstanceIdentifier.EMPTY;
+        final NormalizedNode expected = readStore(store, root);
-        NormalizedNode actual = readStore(ref, root);
+        final Snapshot snapshot = Snapshot.create(new MetadataShardDataTreeSnapshot(expected).serialize(),
+                Collections.emptyList(), 1, 2, 3, 4);
-        assertEquals(expected, actual);
+        shard.tell(new ApplySnapshot(snapshot), ActorRef.noSender());
+
+        final Stopwatch sw = Stopwatch.createStarted();
+        while (sw.elapsed(TimeUnit.SECONDS) <= 5) {
+            Uninterruptibles.sleepUninterruptibly(75, TimeUnit.MILLISECONDS);
+
+            try {
+                assertEquals("Root node", expected, readStore(shard, root));
+                return;
+            } catch (final AssertionError e) {
+                // try again
+            }
+        }
+
+        fail("Snapshot was not applied");
     }
     @Test
     public void testApplyState() throws Exception {
+        final TestActorRef shard = actorFactory.createTestActor(
+                newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), "testApplyState");
-        TestActorRef shard = TestActorRef.create(getSystem(), newShardProps());
+        ShardTestKit.waitUntilLeader(shard);
-        NormalizedNode node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+        final DataTree store = InMemoryDataTreeFactory.getInstance().create(TreeType.OPERATIONAL);
+        store.setSchemaContext(SCHEMA_CONTEXT);
+        writeToStore(store, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
-        MutableCompositeModification compMod = new MutableCompositeModification();
-        compMod.addModification(new WriteModification(TestModel.TEST_PATH, node, SCHEMA_CONTEXT));
-        Payload payload = new CompositeModificationPayload(compMod.toSerializable());
-        ApplyState applyState = new ApplyState(null, "test",
-            new ReplicatedLogImplEntry(1, 2, payload));
+        final NormalizedNode root = readStore(store, YangInstanceIdentifier.EMPTY);
+        final Snapshot snapshot = Snapshot.create(new MetadataShardDataTreeSnapshot(root).serialize(),
+                Collections.emptyList(), 1, 2, 3, 4);
-        shard.underlyingActor().onReceiveCommand(applyState);
+        shard.tell(new ApplySnapshot(snapshot), ActorRef.noSender());
-        NormalizedNode actual = readStore(shard, TestModel.TEST_PATH);
-        assertEquals("Applied state", node, actual);
-    }
+        final DataTreeModification writeMod = store.takeSnapshot().newModification();
+        final ContainerNode node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+        writeMod.write(TestModel.TEST_PATH, node);
+        writeMod.ready();
-    @SuppressWarnings("serial")
-    @Test
-    public void testRecovery() throws Exception {
+        final TransactionIdentifier tx = nextTransactionId();
+        final ApplyState applyState = new ApplyState(null, tx,
+                new SimpleReplicatedLogEntry(1, 2, payloadForModification(store, writeMod, tx)));
-        // Set up the InMemorySnapshotStore.
+        shard.tell(applyState, shard);
+
+        final Stopwatch sw = Stopwatch.createStarted();
+        while (sw.elapsed(TimeUnit.SECONDS) <= 5) {
+            Uninterruptibles.sleepUninterruptibly(75, TimeUnit.MILLISECONDS);
-        InMemoryDOMDataStore testStore = InMemoryDOMDataStoreFactory.create("Test", null, null);
-        testStore.onGlobalContextUpdated(SCHEMA_CONTEXT);
+            final NormalizedNode actual = readStore(shard, TestModel.TEST_PATH);
+            if (actual != null) {
+                assertEquals("Applied state", node, actual);
+                return;
+            }
+        }
-        DOMStoreWriteTransaction writeTx = testStore.newWriteOnlyTransaction();
-        writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
-        DOMStoreThreePhaseCommitCohort commitCohort = writeTx.ready();
-        commitCohort.preCommit().get();
-        commitCohort.commit().get();
+        fail("State was not applied");
+    }
-        DOMStoreReadTransaction readTx = testStore.newReadOnlyTransaction();
-        NormalizedNode root = readTx.read(YangInstanceIdentifier.builder().build()).get().get();
+    @Test
+    public void testDataTreeCandidateRecovery() throws Exception {
+        // Set up the InMemorySnapshotStore.
+        final DataTree source = setupInMemorySnapshotStore();
-        InMemorySnapshotStore.addSnapshot(IDENTIFIER.toString(), Snapshot.create(
-            new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT).encode(
-                root).
-                getNormalizedNode().toByteString().toByteArray(),
-            Collections.emptyList(), 0, 1, -1, -1));
+        final DataTreeModification writeMod = source.takeSnapshot().newModification();
+        writeMod.write(TestModel.OUTER_LIST_PATH, ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
+        writeMod.ready();
+        InMemoryJournal.addEntry(shardID.toString(), 0, DUMMY_DATA);
         // Set up the InMemoryJournal.
+        InMemoryJournal.addEntry(shardID.toString(), 1, new SimpleReplicatedLogEntry(0, 1,
+                payloadForModification(source, writeMod, nextTransactionId())));
-        InMemoryJournal.addEntry(IDENTIFIER.toString(), 0, new ReplicatedLogImplEntry(0, 1, newPayload(
-            new WriteModification(TestModel.OUTER_LIST_PATH,
-                ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
-                SCHEMA_CONTEXT))));
+        final int nListEntries = 16;
+        final Set listEntryKeys = new HashSet<>();
-        int nListEntries = 11;
-        Set listEntryKeys = new HashSet<>();
-        for(int i = 1; i <= nListEntries; i++) {
+        // Add some ModificationPayload entries
+        for (int i = 1; i <= nListEntries; i++) {
             listEntryKeys.add(Integer.valueOf(i));
-            YangInstanceIdentifier path = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+
+            final YangInstanceIdentifier path = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
                     .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i).build();
-            Modification mod = new MergeModification(path,
-                ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i),
-                SCHEMA_CONTEXT);
-            InMemoryJournal.addEntry(IDENTIFIER.toString(), i, new ReplicatedLogImplEntry(i, 1,
-                newPayload(mod)));
+
+            final DataTreeModification mod = source.takeSnapshot().newModification();
+            mod.merge(path, ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, i));
+            mod.ready();
+
+            InMemoryJournal.addEntry(shardID.toString(), i + 1, new SimpleReplicatedLogEntry(i, 1,
+                    payloadForModification(source, mod, nextTransactionId())));
         }
-        InMemoryJournal.addEntry(IDENTIFIER.toString(), nListEntries + 1,
-            new ApplyLogEntries(nListEntries));
+        InMemoryJournal.addEntry(shardID.toString(), nListEntries + 2,
+                new ApplyJournalEntries(nListEntries));
-        // Create the actor and wait for recovery complete.
+        testRecovery(listEntryKeys);
+    }
+
+    @Test
+    @SuppressWarnings("checkstyle:IllegalCatch")
+    public void testConcurrentThreePhaseCommits() throws Exception {
+        final AtomicReference caughtEx = new AtomicReference<>();
+        final CountDownLatch commitLatch = new CountDownLatch(2);
+
+        final long timeoutSec = 5;
+        final FiniteDuration duration = FiniteDuration.create(timeoutSec, TimeUnit.SECONDS);
+        final Timeout timeout = new Timeout(duration);
-        final CountDownLatch recoveryComplete = new CountDownLatch(1);
+        final TestActorRef shard = actorFactory.createTestActor(
+                newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                "testConcurrentThreePhaseCommits");
+
+        class OnFutureComplete extends OnComplete {
+            private final Class expRespType;
+
+            OnFutureComplete(final Class expRespType) {
+                this.expRespType = expRespType;
+            }
-        Creator creator = new Creator() {
             @Override
-            public Shard create() throws Exception {
-                return new Shard(IDENTIFIER, Collections.emptyMap(),
-                        dataStoreContext, SCHEMA_CONTEXT) {
-                    @Override
-                    protected void onRecoveryComplete() {
-                        try {
-                            super.onRecoveryComplete();
-                        } finally {
-                            recoveryComplete.countDown();
-                        }
+            public void onComplete(final Throwable error, final Object resp) {
+                if (error != null) {
+                    caughtEx.set(new AssertionError(getClass().getSimpleName() + " failure", error));
+                } else {
+                    try {
+                        assertEquals("Commit response type", expRespType, resp.getClass());
+                        onSuccess(resp);
+                    } catch (final Exception e) {
+                        caughtEx.set(e);
                    }
-                };
+                }
            }
-        };
-        TestActorRef shard = TestActorRef.create(getSystem(),
-                Props.create(new DelegatingShardCreator(creator)), "testRecovery");
-
-        assertEquals("Recovery complete", true, recoveryComplete.await(5, TimeUnit.SECONDS));
-
-        // Verify data in the data store.
-
-        NormalizedNode outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
-        assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
-        assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
-            outerList.getValue() instanceof Iterable);
-        for(Object entry: (Iterable) outerList.getValue()) {
-            assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode",
-                entry instanceof MapEntryNode);
-            MapEntryNode mapEntry = (MapEntryNode)entry;
-            Optional> idLeaf =
-                mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME));
-            assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
-            Object value = idLeaf.get().getValue();
-            assertTrue("Unexpected value for leaf "+ TestModel.ID_QNAME.getLocalName() + ": " + value,
-                listEntryKeys.remove(value));
+            void onSuccess(final Object resp) throws Exception {
+            }
         }
-        if(!listEntryKeys.isEmpty()) {
-            fail("Missing " + TestModel.OUTER_LIST_QNAME.getLocalName() + " entries with keys: " +
-                listEntryKeys);
+        class OnCommitFutureComplete extends OnFutureComplete {
+            OnCommitFutureComplete() {
+                super(CommitTransactionReply.class);
+            }
+
+            @Override
+            public void onComplete(final Throwable error, final Object resp) {
+                super.onComplete(error, resp);
+                commitLatch.countDown();
+            }
         }
-        assertEquals("Last log index", nListEntries,
-            shard.underlyingActor().getShardMBean().getLastLogIndex());
-        assertEquals("Commit index", nListEntries,
-            shard.underlyingActor().getShardMBean().getCommitIndex());
-        assertEquals("Last applied", nListEntries,
-            shard.underlyingActor().getShardMBean().getLastApplied());
-    }
+        class OnCanCommitFutureComplete extends OnFutureComplete {
+            private final TransactionIdentifier transactionID;
-    private CompositeModificationPayload newPayload(Modification... mods) {
-        MutableCompositeModification compMod = new MutableCompositeModification();
-        for(Modification mod: mods) {
-            compMod.addModification(mod);
+            OnCanCommitFutureComplete(final TransactionIdentifier transactionID) {
+                super(CanCommitTransactionReply.class);
+                this.transactionID = transactionID;
+            }
+
+            @Override
+            void onSuccess(final Object resp) throws Exception {
+                final CanCommitTransactionReply canCommitReply =
+                        CanCommitTransactionReply.fromSerializable(resp);
+                assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+                final Future commitFuture = Patterns.ask(shard,
+                        new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), timeout);
+                commitFuture.onComplete(new OnCommitFutureComplete(), getSystem().dispatcher());
+            }
         }
-        return new CompositeModificationPayload(compMod.toSerializable());
-    }
+        new ShardTestKit(getSystem()) {
+            {
+                waitUntilLeader(shard);
-    private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
-            InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
-            MutableCompositeModification modification) {
-        return setupMockWriteTransaction(cohortName, dataStore, path, data, modification, null);
-    }
+                final TransactionIdentifier transactionID1 = nextTransactionId();
+                final TransactionIdentifier transactionID2 = nextTransactionId();
+                final TransactionIdentifier transactionID3 = nextTransactionId();
-    private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
-            InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
-            MutableCompositeModification modification,
-            final Function> preCommit) {
+                final Map cohortMap = setupCohortDecorator(
+                        shard.underlyingActor(), transactionID1, transactionID2, transactionID3);
+                final CapturingShardDataTreeCohort cohort1 = cohortMap.get(transactionID1);
+                final CapturingShardDataTreeCohort cohort2 = cohortMap.get(transactionID2);
+                final CapturingShardDataTreeCohort cohort3 = cohortMap.get(transactionID3);
-        DOMStoreWriteTransaction tx = dataStore.newWriteOnlyTransaction();
-        tx.write(path, data);
-        final DOMStoreThreePhaseCommitCohort realCohort = tx.ready();
-        DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, cohortName);
+                shard.tell(prepareBatchedModifications(transactionID1, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef());
+                final ReadyTransactionReply readyReply = ReadyTransactionReply
+                        .fromSerializable(expectMsgClass(duration, ReadyTransactionReply.class));
+                assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
-        doAnswer(new Answer>() {
-            @Override
-            public ListenableFuture answer(InvocationOnMock invocation) {
-                return realCohort.canCommit();
-            }
-        }).when(cohort).canCommit();
+                // Send the CanCommitTransaction message for the first Tx.
-        doAnswer(new Answer>() {
-            @Override
-            public ListenableFuture answer(InvocationOnMock invocation) throws Throwable {
-                if(preCommit != null) {
-                    return preCommit.apply(realCohort);
-                } else {
-                    return realCohort.preCommit();
+                shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef());
+                final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply
+                        .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class));
+                assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+                // Ready 2 more Tx's.
+
+                shard.tell(prepareBatchedModifications(transactionID2, TestModel.OUTER_LIST_PATH,
+                        ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                shard.tell(
+                        prepareBatchedModifications(transactionID3,
+                                YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+                                        .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+                                ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), false),
+                        getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                // Send the CanCommitTransaction message for the next 2 Tx's.
+                // These should get queued and
+                // processed after the first Tx completes.
+
+                final Future canCommitFuture1 = Patterns.ask(shard,
+                        new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), timeout);
+
+                final Future canCommitFuture2 = Patterns.ask(shard,
+                        new CanCommitTransaction(transactionID3, CURRENT_VERSION).toSerializable(), timeout);
+
+                // Send the CommitTransaction message for the first Tx. After it
+                // completes, it should
+                // trigger the 2nd Tx to proceed which should in turn then
+                // trigger the 3rd.
+
+                shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef());
+                expectMsgClass(duration, CommitTransactionReply.class);
+
+                // Wait for the next 2 Tx's to complete.
+
+                canCommitFuture1.onComplete(new OnCanCommitFutureComplete(transactionID2), getSystem().dispatcher());
+
+                canCommitFuture2.onComplete(new OnCanCommitFutureComplete(transactionID3), getSystem().dispatcher());
+
+                final boolean done = commitLatch.await(timeoutSec, TimeUnit.SECONDS);
+
+                if (caughtEx.get() != null) {
+                    Throwables.propagateIfInstanceOf(caughtEx.get(), Exception.class);
+                    Throwables.propagate(caughtEx.get());
                }
+
+                assertEquals("Commits complete", true, done);
+
+//                final InOrder inOrder = inOrder(cohort1.getCanCommit(), cohort1.getPreCommit(), cohort1.getCommit(),
+//                        cohort2.getCanCommit(), cohort2.getPreCommit(), cohort2.getCommit(), cohort3.getCanCommit(),
+//                        cohort3.getPreCommit(), cohort3.getCommit());
+//                inOrder.verify(cohort1.getCanCommit()).onSuccess(any(Void.class));
+//                inOrder.verify(cohort1.getPreCommit()).onSuccess(any(DataTreeCandidate.class));
+//                inOrder.verify(cohort2.getCanCommit()).onSuccess(any(Void.class));
+//                inOrder.verify(cohort2.getPreCommit()).onSuccess(any(DataTreeCandidate.class));
+//                inOrder.verify(cohort3.getCanCommit()).onSuccess(any(Void.class));
+//                inOrder.verify(cohort3.getPreCommit()).onSuccess(any(DataTreeCandidate.class));
+//                inOrder.verify(cohort1.getCommit()).onSuccess(any(UnsignedLong.class));
+//                inOrder.verify(cohort2.getCommit()).onSuccess(any(UnsignedLong.class));
+//                inOrder.verify(cohort3.getCommit()).onSuccess(any(UnsignedLong.class));
+
+                // Verify data in the data store.
+
+                verifyOuterListEntry(shard, 1);
+
+                verifyLastApplied(shard, 2);
             }
-        }).when(cohort).preCommit();
+        };
+    }
-        doAnswer(new Answer>() {
-            @Override
-            public ListenableFuture answer(InvocationOnMock invocation) throws Throwable {
-                return realCohort.commit();
+    @Test
+    public void testBatchedModificationsWithNoCommitOnReady() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testBatchedModificationsWithNoCommitOnReady");
+
+                waitUntilLeader(shard);
+
+                final TransactionIdentifier transactionID = nextTransactionId();
+                final FiniteDuration duration = duration("5 seconds");
+
+                // Send a BatchedModifications to start a transaction.
+
+                shard.tell(newBatchedModifications(transactionID, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), false, false, 1), getRef());
+                expectMsgClass(duration, BatchedModificationsReply.class);
+
+                // Send a couple more BatchedModifications.
+
+                shard.tell(
+                        newBatchedModifications(transactionID, TestModel.OUTER_LIST_PATH,
+                                ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false, false, 2),
+                        getRef());
+                expectMsgClass(duration, BatchedModificationsReply.class);
+
+                shard.tell(newBatchedModifications(transactionID,
+                        YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+                                .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+                        ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), true, false, 3),
+                        getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                // Send the CanCommitTransaction message.
+
+                shard.tell(new CanCommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef());
+                final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply
+                        .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class));
+                assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+                // Send the CommitTransaction message.
+
+                shard.tell(new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef());
+                expectMsgClass(duration, CommitTransactionReply.class);
+
+                // Verify data in the data store.
+
+                verifyOuterListEntry(shard, 1);
             }
-        }).when(cohort).commit();
+        };
+    }
-        doAnswer(new Answer>() {
-            @Override
-            public ListenableFuture answer(InvocationOnMock invocation) throws Throwable {
-                return realCohort.abort();
+    @Test
+    public void testBatchedModificationsWithCommitOnReady() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testBatchedModificationsWithCommitOnReady");
+
+                waitUntilLeader(shard);
+
+                final TransactionIdentifier transactionID = nextTransactionId();
+                final FiniteDuration duration = duration("5 seconds");
+
+                // Send a BatchedModifications to start a transaction.
+
+                shard.tell(newBatchedModifications(transactionID, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), false, false, 1), getRef());
+                expectMsgClass(duration, BatchedModificationsReply.class);
+
+                // Send a couple more BatchedModifications.
+
+                shard.tell(newBatchedModifications(transactionID, TestModel.OUTER_LIST_PATH,
+                        ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false, false, 2),
+                        getRef());
+                expectMsgClass(duration, BatchedModificationsReply.class);
+
+                shard.tell(newBatchedModifications(transactionID,
+                        YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+                                .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+                        ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), true, true, 3),
+                        getRef());
+
+                expectMsgClass(duration, CommitTransactionReply.class);
+
+                // Verify data in the data store.
+
+                verifyOuterListEntry(shard, 1);
             }
-        }).when(cohort).abort();
+        };
+    }
+
+    @Test(expected = IllegalStateException.class)
+    public void testBatchedModificationsReadyWithIncorrectTotalMessageCount() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testBatchedModificationsReadyWithIncorrectTotalMessageCount");
+
+                waitUntilLeader(shard);
-        modification.addModification(new WriteModification(path, data, SCHEMA_CONTEXT));
+                final TransactionIdentifier transactionID = nextTransactionId();
+                final BatchedModifications batched = new BatchedModifications(transactionID,
+                        DataStoreVersions.CURRENT_VERSION);
+                batched.setReady(true);
+                batched.setTotalMessagesSent(2);
-        return cohort;
+                shard.tell(batched, getRef());
+
+                final Failure failure = expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class);
+
+                if (failure != null) {
+                    Throwables.propagateIfInstanceOf(failure.cause(), Exception.class);
+                    Throwables.propagate(failure.cause());
+                }
+            }
+        };
     }
-    @SuppressWarnings({ "unchecked" })
     @Test
-    public void testConcurrentThreePhaseCommits() throws Throwable {
-        System.setProperty("shard.persistent", "true");
-        new ShardTestKit(getSystem()) {{
-            final TestActorRef shard = TestActorRef.create(getSystem(),
-                    newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+    public void testBatchedModificationsWithOperationFailure() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testBatchedModificationsWithOperationFailure");
+
+                waitUntilLeader(shard);
-            waitUntilLeader(shard);
+                // Test merge with invalid data. An exception should occur when
+                // the merge is applied. Note that
+                // write will not validate the children for performance reasons.
-            // Setup 3 simulated transactions with mock cohorts backed by real cohorts.
+                final TransactionIdentifier transactionID = nextTransactionId();
-            InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+                final ContainerNode invalidData = ImmutableContainerNodeBuilder.create()
+                        .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(TestModel.TEST_QNAME))
+                        .withChild(ImmutableNodes.leafNode(TestModel.JUNK_QNAME, "junk")).build();
-            String transactionID1 = "tx1";
-            MutableCompositeModification modification1 = new MutableCompositeModification();
-            DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
-                    TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
+                BatchedModifications batched = new BatchedModifications(transactionID, CURRENT_VERSION);
+                batched.addModification(new MergeModification(TestModel.TEST_PATH, invalidData));
+                shard.tell(batched, getRef());
+                Failure failure = expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class);
-            String transactionID2 = "tx2";
-            MutableCompositeModification modification2 = new MutableCompositeModification();
-            DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
-                    TestModel.OUTER_LIST_PATH,
-                    ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
-                    modification2);
+                final Throwable cause = failure.cause();
-            String transactionID3 = "tx3";
-            MutableCompositeModification modification3 = new MutableCompositeModification();
-            DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore,
-                    YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
-                        .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
-                    ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1),
-                    modification3);
+                batched = new BatchedModifications(transactionID, DataStoreVersions.CURRENT_VERSION);
+                batched.setReady(true);
+                batched.setTotalMessagesSent(2);
-            long timeoutSec = 5;
-            final FiniteDuration duration = FiniteDuration.create(timeoutSec, TimeUnit.SECONDS);
-            final Timeout timeout = new Timeout(duration);
+                shard.tell(batched, getRef());
+
+                failure = expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class);
+                assertEquals("Failure cause", cause, failure.cause());
+            }
+        };
+    }
-            // Simulate the ForwardedReadyTransaction message for the first Tx that would be sent
-            // by the ShardTransaction.
+    @Test
+    public void testBatchedModificationsOnTransactionChain() throws Exception {
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testBatchedModificationsOnTransactionChain");
-            shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
-            ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(
-                    expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
-            assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
+                waitUntilLeader(shard);
-            // Send the CanCommitTransaction message for the first Tx.
+                final LocalHistoryIdentifier historyId = nextHistoryId();
+                final TransactionIdentifier transactionID1 = new TransactionIdentifier(historyId, 0);
+                final TransactionIdentifier transactionID2 = new TransactionIdentifier(historyId, 1);
-            shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
-            CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
-                    expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
-            assertEquals("Can commit", true, canCommitReply.getCanCommit());
+                final FiniteDuration duration = duration("5 seconds");
-            // Send the ForwardedReadyTransaction for the next 2 Tx's.
+                // Send a BatchedModifications to start a chained write
+                // transaction and ready it.
-            shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
-            expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+                final ContainerNode containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+                final YangInstanceIdentifier path = TestModel.TEST_PATH;
+                shard.tell(newBatchedModifications(transactionID1, path, containerNode, true, false, 1), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
-            shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3, true), getRef());
-            expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+                // Create a read Tx on the same chain.
-            // Send the CanCommitTransaction message for the next 2 Tx's. These should get queued and
-            // processed after the first Tx completes.
+                shard.tell(new CreateTransaction(transactionID2, TransactionType.READ_ONLY.ordinal(),
+                        DataStoreVersions.CURRENT_VERSION).toSerializable(), getRef());
-            Future canCommitFuture1 = Patterns.ask(shard,
-                    new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+                final CreateTransactionReply createReply = expectMsgClass(duration("3 seconds"),
+                        CreateTransactionReply.class);
-            Future canCommitFuture2 = Patterns.ask(shard,
-                    new CanCommitTransaction(transactionID3).toSerializable(), timeout);
+                getSystem().actorSelection(createReply.getTransactionPath())
+                        .tell(new ReadData(path, DataStoreVersions.CURRENT_VERSION), getRef());
+                final ReadDataReply readReply = expectMsgClass(duration("3 seconds"), ReadDataReply.class);
+                assertEquals("Read node", containerNode, readReply.getNormalizedNode());
-            // Send the CommitTransaction message for the first Tx. After it completes, it should
-            // trigger the 2nd Tx to proceed which should in turn then trigger the 3rd.
+                // Commit the write transaction.
-            shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
-            expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+                shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef());
+                final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply
+                        .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class));
+                assertEquals("Can commit", true, canCommitReply.getCanCommit());
-            // Wait for the next 2 Tx's to complete.
+                shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef());
+                expectMsgClass(duration, CommitTransactionReply.class);
-            final AtomicReference caughtEx = new AtomicReference<>();
-            final CountDownLatch commitLatch = new CountDownLatch(2);
+                // Verify data in the data store.
- class OnFutureComplete extends OnComplete { - private final Class expRespType; + final NormalizedNode actualNode = readStore(shard, path); + assertEquals("Stored node", containerNode, actualNode); + } + }; + } - OnFutureComplete(Class expRespType) { - this.expRespType = expRespType; - } + @Test + public void testOnBatchedModificationsWhenNotLeader() { + final AtomicBoolean overrideLeaderCalls = new AtomicBoolean(); + new ShardTestKit(getSystem()) { + { + final Creator creator = new Creator() { + private static final long serialVersionUID = 1L; - @Override - public void onComplete(Throwable error, Object resp) { - if(error != null) { - System.out.println(new java.util.Date()+": "+getClass().getSimpleName() + " failure: "+error); - caughtEx.set(new AssertionError(getClass().getSimpleName() + " failure", error)); - } else { - try { - assertEquals("Commit response type", expRespType, resp.getClass()); - onSuccess(resp); - } catch (Exception e) { - caughtEx.set(e); - } + @Override + public Shard create() throws Exception { + return new Shard(newShardBuilder()) { + @Override + protected boolean isLeader() { + return overrideLeaderCalls.get() ? false : super.isLeader(); + } + + @Override + public ActorSelection getLeader() { + return overrideLeaderCalls.get() ? getSystem().actorSelection(getRef().path()) + : super.getLeader(); + } + }; } - } + }; - void onSuccess(Object resp) throws Exception { - } + final TestActorRef shard = actorFactory.createTestActor(Props + .create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()), + "testOnBatchedModificationsWhenNotLeader"); + + waitUntilLeader(shard); + + overrideLeaderCalls.set(true); + + final BatchedModifications batched = new BatchedModifications(nextTransactionId(), + DataStoreVersions.CURRENT_VERSION); + + shard.tell(batched, ActorRef.noSender()); + + expectMsgEquals(batched); } + }; + } - class OnCommitFutureComplete extends OnFutureComplete { - OnCommitFutureComplete() { - super(CommitTransactionReply.SERIALIZABLE_CLASS); - } + @Test + public void testTransactionMessagesWithNoLeader() { + new ShardTestKit(getSystem()) { + { + dataStoreContextBuilder.customRaftPolicyImplementation(DisableElectionsRaftPolicy.class.getName()) + .shardHeartbeatIntervalInMillis(50).shardElectionTimeoutFactor(1); + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testTransactionMessagesWithNoLeader"); + + waitUntilNoLeader(shard); + + final TransactionIdentifier txId = nextTransactionId(); + shard.tell(new BatchedModifications(txId, DataStoreVersions.CURRENT_VERSION), getRef()); + Failure failure = expectMsgClass(Failure.class); + assertEquals("Failure cause type", NoShardLeaderException.class, failure.cause().getClass()); + + shard.tell(prepareForwardedReadyTransaction(shard, txId, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true), getRef()); + failure = expectMsgClass(Failure.class); + assertEquals("Failure cause type", NoShardLeaderException.class, failure.cause().getClass()); + + shard.tell(new ReadyLocalTransaction(txId, mock(DataTreeModification.class), true), getRef()); + failure = expectMsgClass(Failure.class); + assertEquals("Failure cause type", NoShardLeaderException.class, failure.cause().getClass()); + } + }; + } + + @Test + public void testReadyWithReadWriteImmediateCommit() throws Exception { + testReadyWithImmediateCommit(true); + } + + @Test + public void testReadyWithWriteOnlyImmediateCommit() throws 
Exception { + testReadyWithImmediateCommit(false); + } + + private void testReadyWithImmediateCommit(final boolean readWrite) throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testReadyWithImmediateCommit-" + readWrite); - @Override - public void onComplete(Throwable error, Object resp) { - super.onComplete(error, resp); - commitLatch.countDown(); + waitUntilLeader(shard); + + final TransactionIdentifier transactionID = nextTransactionId(); + final NormalizedNode containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME); + if (readWrite) { + shard.tell(prepareForwardedReadyTransaction(shard, transactionID, TestModel.TEST_PATH, + containerNode, true), getRef()); + } else { + shard.tell(prepareBatchedModifications(transactionID, TestModel.TEST_PATH, containerNode, true), + getRef()); } + + expectMsgClass(duration("5 seconds"), CommitTransactionReply.class); + + final NormalizedNode actualNode = readStore(shard, TestModel.TEST_PATH); + assertEquals(TestModel.TEST_QNAME.getLocalName(), containerNode, actualNode); } + }; + } - class OnCanCommitFutureComplete extends OnFutureComplete { - private final String transactionID; + @Test + public void testReadyLocalTransactionWithImmediateCommit() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testReadyLocalTransactionWithImmediateCommit"); - OnCanCommitFutureComplete(String transactionID) { - super(CanCommitTransactionReply.SERIALIZABLE_CLASS); - this.transactionID = transactionID; - } + waitUntilLeader(shard); - @Override - void onSuccess(Object resp) throws Exception { - CanCommitTransactionReply canCommitReply = - CanCommitTransactionReply.fromSerializable(resp); - assertEquals("Can commit", true, canCommitReply.getCanCommit()); + final ShardDataTree dataStore = shard.underlyingActor().getDataStore(); - Future commitFuture = Patterns.ask(shard, - new CommitTransaction(transactionID).toSerializable(), timeout); - commitFuture.onComplete(new OnCommitFutureComplete(), getSystem().dispatcher()); - } + final DataTreeModification modification = dataStore.newModification(); + + final ContainerNode writeData = ImmutableNodes.containerNode(TestModel.TEST_QNAME); + new WriteModification(TestModel.TEST_PATH, writeData).apply(modification); + final MapNode mergeData = ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(); + new MergeModification(TestModel.OUTER_LIST_PATH, mergeData).apply(modification); + + final TransactionIdentifier txId = nextTransactionId(); + modification.ready(); + final ReadyLocalTransaction readyMessage = new ReadyLocalTransaction(txId, modification, true); + + shard.tell(readyMessage, getRef()); + + expectMsgClass(CommitTransactionReply.class); + + final NormalizedNode actualNode = readStore(shard, TestModel.OUTER_LIST_PATH); + assertEquals(TestModel.OUTER_LIST_QNAME.getLocalName(), mergeData, actualNode); } + }; + } - canCommitFuture1.onComplete(new OnCanCommitFutureComplete(transactionID2), - getSystem().dispatcher()); + @Test + public void testReadyLocalTransactionWithThreePhaseCommit() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testReadyLocalTransactionWithThreePhaseCommit"); + + 
waitUntilLeader(shard); + + final ShardDataTree dataStore = shard.underlyingActor().getDataStore(); + + final DataTreeModification modification = dataStore.newModification(); + + final ContainerNode writeData = ImmutableNodes.containerNode(TestModel.TEST_QNAME); + new WriteModification(TestModel.TEST_PATH, writeData).apply(modification); + final MapNode mergeData = ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(); + new MergeModification(TestModel.OUTER_LIST_PATH, mergeData).apply(modification); + + final TransactionIdentifier txId = nextTransactionId(); + modification.ready(); + final ReadyLocalTransaction readyMessage = new ReadyLocalTransaction(txId, modification, false); + + shard.tell(readyMessage, getRef()); + + expectMsgClass(ReadyTransactionReply.class); + + // Send the CanCommitTransaction message. + + shard.tell(new CanCommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); + + // Send the CanCommitTransaction message. + + shard.tell(new CommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(CommitTransactionReply.class); + + final NormalizedNode actualNode = readStore(shard, TestModel.OUTER_LIST_PATH); + assertEquals(TestModel.OUTER_LIST_QNAME.getLocalName(), mergeData, actualNode); + } + }; + } - canCommitFuture2.onComplete(new OnCanCommitFutureComplete(transactionID3), - getSystem().dispatcher()); + @Test + public void testReadWriteCommitWithPersistenceDisabled() throws Exception { + dataStoreContextBuilder.persistent(false); + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCommitWithPersistenceDisabled"); + + waitUntilLeader(shard); + + // Setup a simulated transactions with a mock cohort. + + final FiniteDuration duration = duration("5 seconds"); + + final TransactionIdentifier transactionID = nextTransactionId(); + final NormalizedNode containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME); + shard.tell(prepareBatchedModifications(transactionID, TestModel.TEST_PATH, containerNode, false), + getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + // Send the CanCommitTransaction message. + + shard.tell(new CanCommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); - boolean done = commitLatch.await(timeoutSec, TimeUnit.SECONDS); + // Send the CanCommitTransaction message. 
- if(caughtEx.get() != null) { - throw caughtEx.get(); + shard.tell(new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); + + final NormalizedNode actualNode = readStore(shard, TestModel.TEST_PATH); + assertEquals(TestModel.TEST_QNAME.getLocalName(), containerNode, actualNode); } + }; + } - assertEquals("Commits complete", true, done); - - InOrder inOrder = inOrder(cohort1, cohort2, cohort3); - inOrder.verify(cohort1).canCommit(); - inOrder.verify(cohort1).preCommit(); - inOrder.verify(cohort1).commit(); - inOrder.verify(cohort2).canCommit(); - inOrder.verify(cohort2).preCommit(); - inOrder.verify(cohort2).commit(); - inOrder.verify(cohort3).canCommit(); - inOrder.verify(cohort3).preCommit(); - inOrder.verify(cohort3).commit(); - - // Verify data in the data store. - - NormalizedNode outerList = readStore(shard, TestModel.OUTER_LIST_PATH); - assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList); - assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable", - outerList.getValue() instanceof Iterable); - Object entry = ((Iterable)outerList.getValue()).iterator().next(); - assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode", - entry instanceof MapEntryNode); - MapEntryNode mapEntry = (MapEntryNode)entry; - Optional> idLeaf = - mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME)); - assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent()); - assertEquals(TestModel.ID_QNAME.getLocalName() + " value", 1, idLeaf.get().getValue()); - - assertEquals("Last log index", 2, shard.underlyingActor().getShardMBean().getLastLogIndex()); - }}; + @Test + public void testReadWriteCommitWhenTransactionHasNoModifications() { + testCommitWhenTransactionHasNoModifications(true); } @Test - public void testCommitPhaseFailure() throws Throwable { - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + public void testWriteOnlyCommitWhenTransactionHasNoModifications() { + testCommitWhenTransactionHasNoModifications(false); + } + + private void testCommitWhenTransactionHasNoModifications(final boolean readWrite) { + // Note that persistence is enabled which would normally result in the + // entry getting written to the journal + // but here that need not happen + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCommitWhenTransactionHasNoModifications-" + readWrite); - waitUntilLeader(shard); + waitUntilLeader(shard); - // Setup 2 simulated transactions with mock cohorts. The first one fails in the - // commit phase. 
+ final TransactionIdentifier transactionID = nextTransactionId(); - String transactionID1 = "tx1"; - MutableCompositeModification modification1 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1"); - doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit(); - doReturn(Futures.immediateFuture(null)).when(cohort1).preCommit(); - doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort1).commit(); + final FiniteDuration duration = duration("5 seconds"); + + if (readWrite) { + final ReadWriteShardDataTreeTransaction rwTx = shard.underlyingActor().getDataStore() + .newReadWriteTransaction(transactionID); + shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION, rwTx, false), getRef()); + } else { + shard.tell(prepareBatchedModifications(transactionID, new MutableCompositeModification()), + getRef()); + } - String transactionID2 = "tx2"; - MutableCompositeModification modification2 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2"); - doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit(); + expectMsgClass(duration, ReadyTransactionReply.class); - FiniteDuration duration = duration("5 seconds"); - final Timeout timeout = new Timeout(duration); + // Send the CanCommitTransaction message. - // Simulate the ForwardedReadyTransaction messages that would be sent - // by the ShardTransaction. + shard.tell(new CanCommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); - shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); - shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(Shard.GET_SHARD_MBEAN_MESSAGE, getRef()); + final ShardStats shardStats = expectMsgClass(duration, ShardStats.class); - // Send the CanCommitTransaction message for the first Tx. + // Use MBean for verification + // Committed transaction count should increase as usual + assertEquals(1, shardStats.getCommittedTransactionsCount()); - shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef()); - CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable( - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS)); - assertEquals("Can commit", true, canCommitReply.getCanCommit()); + // Commit index should not advance because this does not go into + // the journal + assertEquals(-1, shardStats.getCommitIndex()); + } + }; + } - // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and - // processed after the first Tx completes. 
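The tests below repeatedly drive the same two-step commit handshake against the shard actor once a transaction has been readied: CanCommitTransaction followed by CommitTransaction. As an illustration only (no such helper exists in this patch), the recurring exchange can be summarized by the following sketch, assuming it runs inside a ShardTestKit block where getRef(), expectMsgClass() and duration() are in scope:

    // Illustrative only: the canCommit/commit exchange repeated throughout these tests.
    private void canCommitAndCommit(final ActorRef shard, final TransactionIdentifier txId) {
        final FiniteDuration timeout = duration("5 seconds");

        // Phase 1: CanCommitTransaction - the shard validates the readied modification.
        shard.tell(new CanCommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef());
        final CanCommitTransactionReply canCommit = CanCommitTransactionReply
                .fromSerializable(expectMsgClass(timeout, CanCommitTransactionReply.class));
        assertEquals("Can commit", true, canCommit.getCanCommit());

        // Phase 2: CommitTransaction - the shard applies and acknowledges the commit.
        shard.tell(new CommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef());
        expectMsgClass(timeout, CommitTransactionReply.class);
    }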
+ @Test + public void testReadWriteCommitWhenTransactionHasModifications() throws Exception { + testCommitWhenTransactionHasModifications(true); + } - Future canCommitFuture = Patterns.ask(shard, - new CanCommitTransaction(transactionID2).toSerializable(), timeout); + @Test + public void testWriteOnlyCommitWhenTransactionHasModifications() throws Exception { + testCommitWhenTransactionHasModifications(false); + } - // Send the CommitTransaction message for the first Tx. This should send back an error - // and trigger the 2nd Tx to proceed. + private void testCommitWhenTransactionHasModifications(final boolean readWrite) throws Exception { + new ShardTestKit(getSystem()) { + { + final TipProducingDataTree dataTree = createDelegatingMockDataTree(); + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().dataTree(dataTree).props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCommitWhenTransactionHasModifications-" + readWrite); - shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef()); - expectMsgClass(duration, akka.actor.Status.Failure.class); + waitUntilLeader(shard); - // Wait for the 2nd Tx to complete the canCommit phase. + final FiniteDuration duration = duration("5 seconds"); + final TransactionIdentifier transactionID = nextTransactionId(); - final CountDownLatch latch = new CountDownLatch(1); - canCommitFuture.onComplete(new OnComplete() { - @Override - public void onComplete(Throwable t, Object resp) { - latch.countDown(); + if (readWrite) { + shard.tell(prepareForwardedReadyTransaction(shard, transactionID, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef()); + } else { + shard.tell(prepareBatchedModifications(transactionID, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef()); } - }, getSystem().dispatcher()); - assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS)); + expectMsgClass(duration, ReadyTransactionReply.class); + + // Send the CanCommitTransaction message. 
+ + shard.tell(new CanCommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); + + shard.tell(new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); - InOrder inOrder = inOrder(cohort1, cohort2); - inOrder.verify(cohort1).canCommit(); - inOrder.verify(cohort1).preCommit(); - inOrder.verify(cohort1).commit(); - inOrder.verify(cohort2).canCommit(); - }}; + final InOrder inOrder = inOrder(dataTree); + inOrder.verify(dataTree).validate(any(DataTreeModification.class)); + inOrder.verify(dataTree).prepare(any(DataTreeModification.class)); + inOrder.verify(dataTree).commit(any(DataTreeCandidate.class)); + + shard.tell(Shard.GET_SHARD_MBEAN_MESSAGE, getRef()); + final ShardStats shardStats = expectMsgClass(duration, ShardStats.class); + + // Use MBean for verification + // Committed transaction count should increase as usual + assertEquals(1, shardStats.getCommittedTransactionsCount()); + + // Commit index should advance as we do not have an empty + // modification + assertEquals(0, shardStats.getCommitIndex()); + } + }; } @Test - public void testPreCommitPhaseFailure() throws Throwable { - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + public void testCommitPhaseFailure() throws Exception { + new ShardTestKit(getSystem()) { + { + final TipProducingDataTree dataTree = createDelegatingMockDataTree(); + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().dataTree(dataTree).props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCommitPhaseFailure"); + + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + final Timeout timeout = new Timeout(duration); + + // Setup 2 simulated transactions with mock cohorts. The first + // one fails in the + // commit phase. + + doThrow(new RuntimeException("mock commit failure")).when(dataTree) + .commit(any(DataTreeCandidate.class)); + + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + // Send the CanCommitTransaction message for the first Tx. 
- waitUntilLeader(shard); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); - String transactionID = "tx1"; - MutableCompositeModification modification = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1"); - doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit(); - doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).preCommit(); + // Send the CanCommitTransaction message for the 2nd Tx. This + // should get queued and + // processed after the first Tx completes. - FiniteDuration duration = duration("5 seconds"); + final Future canCommitFuture = Patterns.ask(shard, + new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), timeout); - // Simulate the ForwardedReadyTransaction messages that would be sent - // by the ShardTransaction. + // Send the CommitTransaction message for the first Tx. This + // should send back an error + // and trigger the 2nd Tx to proceed. - shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, akka.actor.Status.Failure.class); - // Send the CanCommitTransaction message. + // Wait for the 2nd Tx to complete the canCommit phase. - shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef()); - CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable( - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS)); - assertEquals("Can commit", true, canCommitReply.getCanCommit()); + final CountDownLatch latch = new CountDownLatch(1); + canCommitFuture.onComplete(new OnComplete() { + @Override + public void onComplete(final Throwable failure, final Object resp) { + latch.countDown(); + } + }, getSystem().dispatcher()); - // Send the CommitTransaction message. This should send back an error - // for preCommit failure. + assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS)); - shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef()); - expectMsgClass(duration, akka.actor.Status.Failure.class); + final InOrder inOrder = inOrder(dataTree); + inOrder.verify(dataTree).validate(any(DataTreeModification.class)); + inOrder.verify(dataTree).prepare(any(DataTreeModification.class)); - InOrder inOrder = inOrder(cohort); - inOrder.verify(cohort).canCommit(); - inOrder.verify(cohort).preCommit(); - }}; + // FIXME: this invocation is done on the result of validate(). 
To test it, we need to make sure mock + // validate performs wrapping and we capture that mock + // inOrder.verify(dataTree).validate(any(DataTreeModification.class)); + + inOrder.verify(dataTree).commit(any(DataTreeCandidate.class)); + } + }; } @Test - public void testCanCommitPhaseFailure() throws Throwable { - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + public void testPreCommitPhaseFailure() throws Exception { + new ShardTestKit(getSystem()) { + { + final TipProducingDataTree dataTree = createDelegatingMockDataTree(); + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().dataTree(dataTree).props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testPreCommitPhaseFailure"); + + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + final Timeout timeout = new Timeout(duration); + + doThrow(new RuntimeException("mock preCommit failure")).when(dataTree) + .prepare(any(DataTreeModification.class)); - waitUntilLeader(shard); + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - final FiniteDuration duration = duration("5 seconds"); + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - String transactionID = "tx1"; - MutableCompositeModification modification = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1"); - doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).canCommit(); + // Send the CanCommitTransaction message for the first Tx. - // Simulate the ForwardedReadyTransaction messages that would be sent - // by the ShardTransaction. + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); - shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + // Send the CanCommitTransaction message for the 2nd Tx. This + // should get queued and + // processed after the first Tx completes. - // Send the CanCommitTransaction message. + final Future canCommitFuture = Patterns.ask(shard, + new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), timeout); - shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef()); - expectMsgClass(duration, akka.actor.Status.Failure.class); - }}; + // Send the CommitTransaction message for the first Tx. This + // should send back an error + // and trigger the 2nd Tx to proceed. + + shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, akka.actor.Status.Failure.class); + + // Wait for the 2nd Tx to complete the canCommit phase. 
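The block that follows waits for the second transaction's canCommit by registering an OnComplete callback that counts down a CountDownLatch. The same patch also uses the more compact blocking form elsewhere (see testAbortAfterCanCommit further down), which under the same assumptions would read:

    // Alternative to the OnComplete/CountDownLatch pattern: block directly on the asked future.
    final CanCommitTransactionReply reply =
            (CanCommitTransactionReply) Await.result(canCommitFuture, duration);
    assertEquals("Can commit", true, reply.getCanCommit());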
+ + final CountDownLatch latch = new CountDownLatch(1); + canCommitFuture.onComplete(new OnComplete() { + @Override + public void onComplete(final Throwable failure, final Object resp) { + latch.countDown(); + } + }, getSystem().dispatcher()); + + assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS)); + + final InOrder inOrder = inOrder(dataTree); + inOrder.verify(dataTree).validate(any(DataTreeModification.class)); + inOrder.verify(dataTree).prepare(any(DataTreeModification.class)); + inOrder.verify(dataTree).validate(any(DataTreeModification.class)); + } + }; } @Test - public void testAbortBeforeFinishCommit() throws Throwable { - System.setProperty("shard.persistent", "true"); - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); - - waitUntilLeader(shard); - - final FiniteDuration duration = duration("5 seconds"); - final Timeout timeout = new Timeout(duration); - - InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore(); - - final String transactionID = "tx1"; - final CountDownLatch abortComplete = new CountDownLatch(1); - Function> preCommit = - new Function>() { - @Override - public ListenableFuture apply(final DOMStoreThreePhaseCommitCohort cohort) { - ListenableFuture preCommitFuture = cohort.preCommit(); - - Future abortFuture = Patterns.ask(shard, - new AbortTransaction(transactionID).toSerializable(), timeout); - abortFuture.onComplete(new OnComplete() { - @Override - public void onComplete(Throwable e, Object resp) { - abortComplete.countDown(); - } - }, getSystem().dispatcher()); + public void testCanCommitPhaseFailure() throws Exception { + new ShardTestKit(getSystem()) { + { + final TipProducingDataTree dataTree = createDelegatingMockDataTree(); + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().dataTree(dataTree).props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCanCommitPhaseFailure"); - return preCommitFuture; - } - }; + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + final TransactionIdentifier transactionID1 = nextTransactionId(); - MutableCompositeModification modification = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort = setupMockWriteTransaction("cohort1", dataStore, - TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), - modification, preCommit); + doThrow(new DataValidationFailedException(YangInstanceIdentifier.EMPTY, "mock canCommit failure")) + .doNothing().when(dataTree).validate(any(DataTreeModification.class)); - shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef()); - CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable( - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS)); - assertEquals("Can commit", true, canCommitReply.getCanCommit()); + // Send the CanCommitTransaction message. 
- Future commitFuture = Patterns.ask(shard, - new CommitTransaction(transactionID).toSerializable(), timeout); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, akka.actor.Status.Failure.class); - assertEquals("Abort complete", true, abortComplete.await(5, TimeUnit.SECONDS)); + // Send another can commit to ensure the failed one got cleaned + // up. - Await.result(commitFuture, duration); + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - NormalizedNode node = readStore(shard, TestModel.TEST_PATH); - assertNotNull(TestModel.TEST_QNAME.getLocalName() + " not found", node); - }}; + shard.tell(new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef()); + final CanCommitTransactionReply reply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(CanCommitTransactionReply.class)); + assertEquals("getCanCommit", true, reply.getCanCommit()); + } + }; } @Test - public void testTransactionCommitTimeout() throws Throwable { - dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitTimeoutInSeconds(1).build(); + public void testImmediateCommitWithCanCommitPhaseFailure() throws Exception { + testImmediateCommitWithCanCommitPhaseFailure(true); + testImmediateCommitWithCanCommitPhaseFailure(false); + } + + private void testImmediateCommitWithCanCommitPhaseFailure(final boolean readWrite) throws Exception { + new ShardTestKit(getSystem()) { + { + final TipProducingDataTree dataTree = createDelegatingMockDataTree(); + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().dataTree(dataTree).props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testImmediateCommitWithCanCommitPhaseFailure-" + readWrite); - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + waitUntilLeader(shard); - waitUntilLeader(shard); + doThrow(new DataValidationFailedException(YangInstanceIdentifier.EMPTY, "mock canCommit failure")) + .doNothing().when(dataTree).validate(any(DataTreeModification.class)); - final FiniteDuration duration = duration("5 seconds"); + final FiniteDuration duration = duration("5 seconds"); - InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore(); + final TransactionIdentifier transactionID1 = nextTransactionId(); - writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME)); - writeToStore(shard, TestModel.OUTER_LIST_PATH, - ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build()); + if (readWrite) { + shard.tell(prepareForwardedReadyTransaction(shard, transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true), getRef()); + } else { + shard.tell(prepareBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true), getRef()); + } - // Create 1st Tx - will timeout + expectMsgClass(duration, akka.actor.Status.Failure.class); - String transactionID1 = "tx1"; - MutableCompositeModification modification1 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore, - 
YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH) - .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(), - ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), - modification1); + // Send another can commit to ensure the failed one got cleaned + // up. - // Create 2nd Tx + final TransactionIdentifier transactionID2 = nextTransactionId(); + if (readWrite) { + shard.tell(prepareForwardedReadyTransaction(shard, transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true), getRef()); + } else { + shard.tell(prepareBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true), getRef()); + } - String transactionID2 = "tx3"; - MutableCompositeModification modification2 = new MutableCompositeModification(); - YangInstanceIdentifier listNodePath = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH) - .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2).build(); - DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort3", dataStore, - listNodePath, - ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2), - modification2); + expectMsgClass(duration, CommitTransactionReply.class); + } + }; + } - // Ready the Tx's + @Test + public void testAbortWithCommitPending() throws Exception { + new ShardTestKit(getSystem()) { + { + final Creator creator = () -> new Shard(newShardBuilder()) { + @Override + void persistPayload(final TransactionIdentifier transactionId, final Payload payload, + boolean batchHint) { + // Simulate an AbortTransaction message occurring during + // replication, after + // persisting and before finishing the commit to the + // in-memory store. + + doAbortTransaction(transactionId, null); + super.persistPayload(transactionId, payload, batchHint); + } + }; - shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + final TestActorRef shard = actorFactory.createTestActor(Props + .create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()), + "testAbortWithCommitPending"); - shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + waitUntilLeader(shard); - // canCommit 1st Tx. We don't send the commit so it should timeout. + final FiniteDuration duration = duration("5 seconds"); - shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef()); - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS); + final TransactionIdentifier transactionID = nextTransactionId(); - // canCommit the 2nd Tx - it should complete after the 1st Tx times out. + shard.tell(prepareBatchedModifications(transactionID, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef()); - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS); + shard.tell(new CanCommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); - // Commit the 2nd Tx. 
+ shard.tell(new CommitTransaction(transactionID, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); - shard.tell(new CommitTransaction(transactionID2).toSerializable(), getRef()); - expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS); + final NormalizedNode node = readStore(shard, TestModel.TEST_PATH); - NormalizedNode node = readStore(shard, listNodePath); - assertNotNull(listNodePath + " not found", node); - }}; + // Since we're simulating an abort occurring during replication + // and before finish commit, + // the data should still get written to the in-memory store + // since we've gotten past + // canCommit and preCommit and persisted the data. + assertNotNull(TestModel.TEST_QNAME.getLocalName() + " not found", node); + } + }; } @Test - public void testTransactionCommitQueueCapacityExceeded() throws Throwable { - dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitQueueCapacity(1).build(); + public void testTransactionCommitTimeout() throws Exception { + dataStoreContextBuilder.shardTransactionCommitTimeoutInSeconds(1); + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testTransactionCommitTimeout"); + + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + + writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME)); + writeToStore(shard, TestModel.OUTER_LIST_PATH, + ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build()); + + // Ready 2 Tx's - the first will timeout + + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell( + prepareBatchedModifications(transactionID1, + YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH) + .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(), + ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), false), + getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + final TransactionIdentifier transactionID2 = nextTransactionId(); + final YangInstanceIdentifier listNodePath = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH) + .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2).build(); + shard.tell( + prepareBatchedModifications(transactionID2, listNodePath, + ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2), false), + getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + // canCommit 1st Tx. We don't send the commit so it should + // timeout. - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); - waitUntilLeader(shard); + // canCommit the 2nd Tx - it should complete after the 1st Tx + // times out. - final FiniteDuration duration = duration("5 seconds"); + shard.tell(new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); - InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore(); + // Try to commit the 1st Tx - should fail as it's not the + // current Tx. 
- String transactionID1 = "tx1"; - MutableCompositeModification modification1 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore, - TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1); + shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, akka.actor.Status.Failure.class); - String transactionID2 = "tx2"; - MutableCompositeModification modification2 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore, - TestModel.OUTER_LIST_PATH, - ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), - modification2); + // Commit the 2nd Tx. - String transactionID3 = "tx3"; - MutableCompositeModification modification3 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore, - TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification3); + shard.tell(new CommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); - // Ready the Tx's + final NormalizedNode node = readStore(shard, listNodePath); + assertNotNull(listNodePath + " not found", node); + } + }; + } + +// @Test +// @Ignore +// public void testTransactionCommitQueueCapacityExceeded() throws Throwable { +// dataStoreContextBuilder.shardTransactionCommitQueueCapacity(2); +// +// new ShardTestKit(getSystem()) {{ +// final TestActorRef shard = actorFactory.createTestActor( +// newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), +// "testTransactionCommitQueueCapacityExceeded"); +// +// waitUntilLeader(shard); +// +// final FiniteDuration duration = duration("5 seconds"); +// +// final ShardDataTree dataStore = shard.underlyingActor().getDataStore(); +// +// final TransactionIdentifier transactionID1 = nextTransactionId(); +// final MutableCompositeModification modification1 = new MutableCompositeModification(); +// final ShardDataTreeCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore, +// TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), transactionID1, +// modification1); +// +// final TransactionIdentifier transactionID2 = nextTransactionId(); +// final MutableCompositeModification modification2 = new MutableCompositeModification(); +// final ShardDataTreeCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore, +// TestModel.OUTER_LIST_PATH, +// ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), transactionID2, +// modification2); +// +// final TransactionIdentifier transactionID3 = nextTransactionId(); +// final MutableCompositeModification modification3 = new MutableCompositeModification(); +// final ShardDataTreeCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore, +// TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), transactionID3, +// modification3); +// +// // Ready the Tx's +// +// shard.tell(prepareReadyTransactionMessage(false, shard.underlyingActor(), cohort1, transactionID1, +// modification1), getRef()); +// expectMsgClass(duration, ReadyTransactionReply.class); +// +// shard.tell(prepareReadyTransactionMessage(false, shard.underlyingActor(), cohort2, transactionID2, +// modification2), getRef()); +// expectMsgClass(duration, ReadyTransactionReply.class); +// +// // The 3rd Tx should exceed 
queue capacity and fail.
+//
+//            shard.tell(prepareReadyTransactionMessage(false, shard.underlyingActor(), cohort3, transactionID3,
+//                modification3), getRef());
+//            expectMsgClass(duration, akka.actor.Status.Failure.class);
+//
+//            // canCommit 1st Tx.
+//
+//            shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef());
+//            expectMsgClass(duration, CanCommitTransactionReply.class);
+//
+//            // canCommit the 2nd Tx - it should get queued.
+//
+//            shard.tell(new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef());
+//
+//            // canCommit the 3rd Tx - should exceed queue capacity and fail.
+//
+//            shard.tell(new CanCommitTransaction(transactionID3, CURRENT_VERSION).toSerializable(), getRef());
+//            expectMsgClass(duration, akka.actor.Status.Failure.class);
+//        }};
+//    }
+
+    @Test
+    public void testTransactionCommitWithPriorExpiredCohortEntries() throws Exception {
+        dataStoreContextBuilder.shardTransactionCommitTimeoutInSeconds(1);
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef<Shard> shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testTransactionCommitWithPriorExpiredCohortEntries");
+
+                waitUntilLeader(shard);
+
+                final FiniteDuration duration = duration("5 seconds");
+
+                final TransactionIdentifier transactionID1 = nextTransactionId();
+                shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                final TransactionIdentifier transactionID2 = nextTransactionId();
+                shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                final TransactionIdentifier transactionID3 = nextTransactionId();
+                shard.tell(newBatchedModifications(transactionID3, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                // All Tx's are readied. We'll send canCommit for the last one
+                // but not the others. The others
+                // should expire from the queue and the last one should be
+                // processed.
+
+                shard.tell(new CanCommitTransaction(transactionID3, CURRENT_VERSION).toSerializable(), getRef());
+                expectMsgClass(duration, CanCommitTransactionReply.class);
+            }
+        };
+    }
+
+    @Test
+    public void testTransactionCommitWithSubsequentExpiredCohortEntry() throws Exception {
+        dataStoreContextBuilder.shardTransactionCommitTimeoutInSeconds(1);
+        new ShardTestKit(getSystem()) {
+            {
+                final TestActorRef<Shard> shard = actorFactory.createTestActor(
+                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
+                        "testTransactionCommitWithSubsequentExpiredCohortEntry");
+
+                waitUntilLeader(shard);
+
+                final FiniteDuration duration = duration("5 seconds");
+
+                final ShardDataTree dataStore = shard.underlyingActor().getDataStore();
+
+                final TransactionIdentifier transactionID1 = nextTransactionId();
+                shard.tell(prepareBatchedModifications(transactionID1, TestModel.TEST_PATH,
+                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef());
+                expectMsgClass(duration, ReadyTransactionReply.class);
+
+                // CanCommit the first Tx so it's the current in-progress Tx.
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); - shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + // Ready the second Tx. - shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(prepareBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), false), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - // canCommit 1st Tx. + // Ready the third Tx. - shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef()); - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS); + final TransactionIdentifier transactionID3 = nextTransactionId(); + final DataTreeModification modification3 = dataStore.newModification(); + new WriteModification(TestModel.TEST2_PATH, ImmutableNodes.containerNode(TestModel.TEST2_QNAME)) + .apply(modification3); + modification3.ready(); + final ReadyLocalTransaction readyMessage = new ReadyLocalTransaction(transactionID3, modification3, + true); + shard.tell(readyMessage, getRef()); - // canCommit the 2nd Tx - it should get queued. + // Commit the first Tx. After completing, the second should + // expire from the queue and the third + // Tx committed. - shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef()); + shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); - // canCommit the 3rd Tx - should exceed queue capacity and fail. + // Expect commit reply from the third Tx. 
- shard.tell(new CanCommitTransaction(transactionID3).toSerializable(), getRef()); - expectMsgClass(duration, akka.actor.Status.Failure.class); - }}; + expectMsgClass(duration, CommitTransactionReply.class); + + final NormalizedNode node = readStore(shard, TestModel.TEST2_PATH); + assertNotNull(TestModel.TEST2_PATH + " not found", node); + } + }; } @Test - public void testCanCommitBeforeReadyFailure() throws Throwable { - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); - - shard.tell(new CanCommitTransaction("tx").toSerializable(), getRef()); - expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class); - }}; + public void testCanCommitBeforeReadyFailure() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testCanCommitBeforeReadyFailure"); + + shard.tell(new CanCommitTransaction(nextTransactionId(), CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class); + } + }; } @Test - public void testAbortTransaction() throws Throwable { - new ShardTestKit(getSystem()) {{ - final TestActorRef shard = TestActorRef.create(getSystem(), - newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName()); + public void testAbortAfterCanCommit() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), "testAbortAfterCanCommit"); + + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + final Timeout timeout = new Timeout(duration); + + // Ready 2 transactions - the first one will be aborted. - waitUntilLeader(shard); + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - // Setup 2 simulated transactions with mock cohorts. The first one will be aborted. + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - String transactionID1 = "tx1"; - MutableCompositeModification modification1 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1"); - doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit(); - doReturn(Futures.immediateFuture(null)).when(cohort1).abort(); + // Send the CanCommitTransaction message for the first Tx. 
- String transactionID2 = "tx2"; - MutableCompositeModification modification2 = new MutableCompositeModification(); - DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2"); - doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit(); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + CanCommitTransactionReply canCommitReply = CanCommitTransactionReply + .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class)); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); - FiniteDuration duration = duration("5 seconds"); - final Timeout timeout = new Timeout(duration); + // Send the CanCommitTransaction message for the 2nd Tx. This + // should get queued and + // processed after the first Tx completes. - // Simulate the ForwardedReadyTransaction messages that would be sent - // by the ShardTransaction. + final Future canCommitFuture = Patterns.ask(shard, + new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), timeout); - shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + // Send the AbortTransaction message for the first Tx. This + // should trigger the 2nd + // Tx to proceed. - shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef()); - expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS); + shard.tell(new AbortTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, AbortTransactionReply.class); - // Send the CanCommitTransaction message for the first Tx. + // Wait for the 2nd Tx to complete the canCommit phase. - shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef()); - CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable( - expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS)); - assertEquals("Can commit", true, canCommitReply.getCanCommit()); + canCommitReply = (CanCommitTransactionReply) Await.result(canCommitFuture, duration); + assertEquals("Can commit", true, canCommitReply.getCanCommit()); + } + }; + } + + @Test + public void testAbortAfterReady() throws Exception { + dataStoreContextBuilder.shardTransactionCommitTimeoutInSeconds(1); + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), "testAbortAfterReady"); - // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and - // processed after the first Tx completes. + waitUntilLeader(shard); - Future canCommitFuture = Patterns.ask(shard, - new CanCommitTransaction(transactionID2).toSerializable(), timeout); + final FiniteDuration duration = duration("5 seconds"); - // Send the AbortTransaction message for the first Tx. This should trigger the 2nd - // Tx to proceed. + // Ready a tx. 
- shard.tell(new AbortTransaction(transactionID1).toSerializable(), getRef()); - expectMsgClass(duration, AbortTransactionReply.SERIALIZABLE_CLASS); + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); - // Wait for the 2nd Tx to complete the canCommit phase. + // Send the AbortTransaction message. - final CountDownLatch latch = new CountDownLatch(1); - canCommitFuture.onComplete(new OnComplete() { - @Override - public void onComplete(Throwable t, Object resp) { - latch.countDown(); - } - }, getSystem().dispatcher()); + shard.tell(new AbortTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, AbortTransactionReply.class); + + assertEquals("getPendingTxCommitQueueSize", 0, shard.underlyingActor().getPendingTxCommitQueueSize()); + + // Now send CanCommitTransaction - should fail. - assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS)); + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + final Throwable failure = expectMsgClass(duration, akka.actor.Status.Failure.class).cause(); + assertTrue("Failure type", failure instanceof IllegalStateException); - InOrder inOrder = inOrder(cohort1, cohort2); - inOrder.verify(cohort1).canCommit(); - inOrder.verify(cohort2).canCommit(); - }}; + // Ready and CanCommit another and verify success. + + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + shard.tell(new CanCommitTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); + } + }; } @Test - public void testCreateSnapshot() throws IOException, InterruptedException { - new ShardTestKit(getSystem()) {{ - final AtomicReference latch = new AtomicReference<>(new CountDownLatch(1)); - Creator creator = new Creator() { - @Override - public Shard create() throws Exception { - return new Shard(IDENTIFIER, Collections.emptyMap(), - dataStoreContext, SCHEMA_CONTEXT) { - @Override - public void saveSnapshot(Object snapshot) { - super.saveSnapshot(snapshot); - latch.get().countDown(); - } - }; + public void testAbortQueuedTransaction() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), "testAbortAfterReady"); + + waitUntilLeader(shard); + + final FiniteDuration duration = duration("5 seconds"); + + // Ready 3 tx's. 
+ + final TransactionIdentifier transactionID1 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID1, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + final TransactionIdentifier transactionID2 = nextTransactionId(); + shard.tell(newBatchedModifications(transactionID2, TestModel.TEST_PATH, + ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + final TransactionIdentifier transactionID3 = nextTransactionId(); + shard.tell( + newBatchedModifications(transactionID3, TestModel.OUTER_LIST_PATH, + ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), true, false, 1), + getRef()); + expectMsgClass(duration, ReadyTransactionReply.class); + + // Abort the second tx while it's queued. + + shard.tell(new AbortTransaction(transactionID2, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, AbortTransactionReply.class); + + // Commit the other 2. + + shard.tell(new CanCommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); + + shard.tell(new CommitTransaction(transactionID1, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); + + shard.tell(new CanCommitTransaction(transactionID3, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CanCommitTransactionReply.class); + + shard.tell(new CommitTransaction(transactionID3, CURRENT_VERSION).toSerializable(), getRef()); + expectMsgClass(duration, CommitTransactionReply.class); + + assertEquals("getPendingTxCommitQueueSize", 0, shard.underlyingActor().getPendingTxCommitQueueSize()); + } + }; + } + + @Test + public void testCreateSnapshotWithNonPersistentData() throws Exception { + testCreateSnapshot(false, "testCreateSnapshotWithNonPersistentData"); + } + + @Test + public void testCreateSnapshot() throws Exception { + testCreateSnapshot(true, "testCreateSnapshot"); + } + + private void testCreateSnapshot(final boolean persistent, final String shardActorName) throws Exception { + final AtomicReference latch = new AtomicReference<>(new CountDownLatch(1)); + + final AtomicReference savedSnapshot = new AtomicReference<>(); + class TestPersistentDataProvider extends DelegatingPersistentDataProvider { + TestPersistentDataProvider(final DataPersistenceProvider delegate) { + super(delegate); + } + + @Override + public void saveSnapshot(final Object obj) { + savedSnapshot.set(obj); + super.saveSnapshot(obj); + } + } + + dataStoreContextBuilder.persistent(persistent); + + class TestShard extends Shard { + + protected TestShard(final AbstractBuilder builder) { + super(builder); + setPersistence(new TestPersistentDataProvider(super.persistence())); + } + + @Override + public void handleCommand(final Object message) { + super.handleCommand(message); + + // XXX: commit_snapshot equality check references RaftActorSnapshotMessageSupport.COMMIT_SNAPSHOT + if (message instanceof SaveSnapshotSuccess || "commit_snapshot".equals(message.toString())) { + latch.get().countDown(); } - }; + } + + @Override + public RaftActorContext getRaftActorContext() { + return super.getRaftActorContext(); + } + } - TestActorRef shard = TestActorRef.create(getSystem(), - Props.create(new DelegatingShardCreator(creator)), "testCreateSnapshot"); + new ShardTestKit(getSystem()) { + { + final 
Creator creator = () -> new TestShard(newShardBuilder()); - waitUntilLeader(shard); + final TestActorRef shard = actorFactory.createTestActor(Props + .create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()), + shardActorName); - shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef()); + waitUntilLeader(shard); + writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME)); - assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS)); + final NormalizedNode expectedRoot = readStore(shard, YangInstanceIdentifier.EMPTY); - latch.set(new CountDownLatch(1)); - shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef()); + // Trigger creation of a snapshot by ensuring + final RaftActorContext raftActorContext = ((TestShard) shard.underlyingActor()).getRaftActorContext(); + raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1); + awaitAndValidateSnapshot(expectedRoot); - assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS)); - }}; + raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1); + awaitAndValidateSnapshot(expectedRoot); + } + + private void awaitAndValidateSnapshot(final NormalizedNode expectedRoot) + throws InterruptedException, IOException { + assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS)); + + assertTrue("Invalid saved snapshot " + savedSnapshot.get(), savedSnapshot.get() instanceof Snapshot); + + verifySnapshot((Snapshot) savedSnapshot.get(), expectedRoot); + + latch.set(new CountDownLatch(1)); + savedSnapshot.set(null); + } + + private void verifySnapshot(final Snapshot snapshot, final NormalizedNode expectedRoot) + throws IOException { + final NormalizedNode actual = ShardDataTreeSnapshot.deserialize(snapshot.getState()).getRootNode() + .get(); + assertEquals("Root node", expectedRoot, actual); + } + }; } /** - * This test simply verifies that the applySnapShot logic will work - * @throws ReadFailedException + * This test simply verifies that the applySnapShot logic will work. 
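+ * It writes a container node into an in-memory data tree, reads back the resulting root node,
+ * replaces the root with that snapshot in a second modification and verifies the restored data
+ * equals the original.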
*/ @Test - public void testInMemoryDataStoreRestore() throws ReadFailedException { - InMemoryDOMDataStore store = new InMemoryDOMDataStore("test", MoreExecutors.listeningDecorator( - MoreExecutors.sameThreadExecutor()), MoreExecutors.sameThreadExecutor()); - - store.onGlobalContextUpdated(SCHEMA_CONTEXT); + public void testInMemoryDataTreeRestore() throws ReadFailedException, DataValidationFailedException { + final DataTree store = InMemoryDataTreeFactory.getInstance().create(TreeType.OPERATIONAL); + store.setSchemaContext(SCHEMA_CONTEXT); - DOMStoreWriteTransaction putTransaction = store.newWriteOnlyTransaction(); + final DataTreeModification putTransaction = store.takeSnapshot().newModification(); putTransaction.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME)); - commitTransaction(putTransaction); + commitTransaction(store, putTransaction); - NormalizedNode expected = readStore(store); + final NormalizedNode expected = readStore(store, YangInstanceIdentifier.EMPTY); - DOMStoreWriteTransaction writeTransaction = store.newWriteOnlyTransaction(); + final DataTreeModification writeTransaction = store.takeSnapshot().newModification(); - writeTransaction.delete(YangInstanceIdentifier.builder().build()); - writeTransaction.write(YangInstanceIdentifier.builder().build(), expected); + writeTransaction.delete(YangInstanceIdentifier.EMPTY); + writeTransaction.write(YangInstanceIdentifier.EMPTY, expected); - commitTransaction(writeTransaction); + commitTransaction(store, writeTransaction); - NormalizedNode actual = readStore(store); + final NormalizedNode actual = readStore(store, YangInstanceIdentifier.EMPTY); assertEquals(expected, actual); - } - private NormalizedNode readStore(InMemoryDOMDataStore store) throws ReadFailedException { - DOMStoreReadTransaction transaction = store.newReadOnlyTransaction(); - CheckedFuture>, ReadFailedException> read = - transaction.read(YangInstanceIdentifier.builder().build()); + @Test + public void testRecoveryApplicable() { + + final DatastoreContext persistentContext = DatastoreContext.newBuilder() + .shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(true).build(); + + final Props persistentProps = Shard.builder().id(shardID).datastoreContext(persistentContext) + .schemaContext(SCHEMA_CONTEXT).props(); + + final DatastoreContext nonPersistentContext = DatastoreContext.newBuilder() + .shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(false).build(); + + final Props nonPersistentProps = Shard.builder().id(shardID).datastoreContext(nonPersistentContext) + .schemaContext(SCHEMA_CONTEXT).props(); - Optional> optional = read.checkedGet(); + new ShardTestKit(getSystem()) { + { + final TestActorRef shard1 = actorFactory.createTestActor(persistentProps, "testPersistence1"); - NormalizedNode normalizedNode = optional.get(); + assertTrue("Recovery Applicable", shard1.underlyingActor().persistence().isRecoveryApplicable()); - transaction.close(); + final TestActorRef shard2 = actorFactory.createTestActor(nonPersistentProps, "testPersistence2"); - return normalizedNode; + assertFalse("Recovery Not Applicable", shard2.underlyingActor().persistence().isRecoveryApplicable()); + } + }; } - private void commitTransaction(DOMStoreWriteTransaction transaction) { - DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready(); - ListenableFuture future = - commitCohort.preCommit(); - try { - future.get(); - future = commitCohort.commit(); - future.get(); - } catch (InterruptedException | 
ExecutionException e) { - } + @Test + public void testOnDatastoreContext() { + new ShardTestKit(getSystem()) { + { + dataStoreContextBuilder.persistent(true); + + final TestActorRef shard = actorFactory.createTestActor(newShardProps(), + "testOnDatastoreContext"); + + assertEquals("isRecoveryApplicable", true, + shard.underlyingActor().persistence().isRecoveryApplicable()); + + waitUntilLeader(shard); + + shard.tell(dataStoreContextBuilder.persistent(false).build(), ActorRef.noSender()); + + assertEquals("isRecoveryApplicable", false, + shard.underlyingActor().persistence().isRecoveryApplicable()); + + shard.tell(dataStoreContextBuilder.persistent(true).build(), ActorRef.noSender()); + + assertEquals("isRecoveryApplicable", true, + shard.underlyingActor().persistence().isRecoveryApplicable()); + } + }; } - private AsyncDataChangeListener> noOpDataChangeListener() { - return new AsyncDataChangeListener>() { - @Override - public void onDataChanged( - AsyncDataChangeEvent> change) { + @Test + public void testRegisterRoleChangeListener() throws Exception { + new ShardTestKit(getSystem()) { + { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testRegisterRoleChangeListener"); + + waitUntilLeader(shard); + final TestActorRef listener = + TestActorRef.create(getSystem(), Props.create(MessageCollectorActor.class)); + + shard.tell(new RegisterRoleChangeListener(), listener); + + MessageCollectorActor.expectFirstMatching(listener, RegisterRoleChangeListenerReply.class); + + ShardLeaderStateChanged leaderStateChanged = MessageCollectorActor.expectFirstMatching(listener, + ShardLeaderStateChanged.class); + assertEquals("getLocalShardDataTree present", true, + leaderStateChanged.getLocalShardDataTree().isPresent()); + assertSame("getLocalShardDataTree", shard.underlyingActor().getDataStore().getDataTree(), + leaderStateChanged.getLocalShardDataTree().get()); + + MessageCollectorActor.clearMessages(listener); + + // Force a leader change + + shard.tell(new RequestVote(10000, "member2", 50, 50), getRef()); + + leaderStateChanged = MessageCollectorActor.expectFirstMatching(listener, + ShardLeaderStateChanged.class); + assertEquals("getLocalShardDataTree present", false, + leaderStateChanged.getLocalShardDataTree().isPresent()); } }; } - private NormalizedNode readStore(TestActorRef shard, YangInstanceIdentifier id) - throws ExecutionException, InterruptedException { - DOMStoreReadTransaction transaction = shard.underlyingActor().getDataStore().newReadOnlyTransaction(); + @Test + public void testFollowerInitialSyncStatus() throws Exception { + final TestActorRef shard = actorFactory.createTestActor( + newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testFollowerInitialSyncStatus"); - CheckedFuture>, ReadFailedException> future = - transaction.read(id); + shard.underlyingActor().handleNonRaftCommand(new FollowerInitialSyncUpStatus(false, + "member-1-shard-inventory-operational")); - Optional> optional = future.get(); - NormalizedNode node = optional.isPresent()? 
optional.get() : null; + assertEquals(false, shard.underlyingActor().getShardMBean().getFollowerInitialSyncStatus()); - transaction.close(); + shard.underlyingActor().handleNonRaftCommand(new FollowerInitialSyncUpStatus(true, + "member-1-shard-inventory-operational")); - return node; + assertEquals(true, shard.underlyingActor().getShardMBean().getFollowerInitialSyncStatus()); } - private void writeToStore(TestActorRef shard, YangInstanceIdentifier id, NormalizedNode node) - throws ExecutionException, InterruptedException { - DOMStoreWriteTransaction transaction = shard.underlyingActor().getDataStore().newWriteOnlyTransaction(); + @Test + public void testClusteredDataChangeListenerDelayedRegistration() throws Exception { + new ShardTestKit(getSystem()) { + { + final String testName = "testClusteredDataChangeListenerDelayedRegistration"; + dataStoreContextBuilder.shardElectionTimeoutFactor(1000) + .customRaftPolicyImplementation(DisableElectionsRaftPolicy.class.getName()); + + final MockDataChangeListener listener = new MockDataChangeListener(1); + final ActorRef dclActor = actorFactory.createActor(DataChangeListener.props(listener), + actorFactory.generateActorId(testName + "-DataChangeListener")); + + setupInMemorySnapshotStore(); + + final TestActorRef shard = actorFactory.createTestActor( + newShardBuilder().props().withDispatcher(Dispatchers.DefaultDispatcherId()), + actorFactory.generateActorId(testName + "-shard")); + + waitUntilNoLeader(shard); + + final YangInstanceIdentifier path = TestModel.TEST_PATH; + + shard.tell(new RegisterChangeListener(path, dclActor, AsyncDataBroker.DataChangeScope.BASE, true), + getRef()); + final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"), + RegisterChangeListenerReply.class); + assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath()); - transaction.write(id, node); + shard.tell(DatastoreContext.newBuilderFrom(dataStoreContextBuilder.build()) + .customRaftPolicyImplementation(null).build(), ActorRef.noSender()); - DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready(); - commitCohort.preCommit().get(); - commitCohort.commit().get(); + listener.waitForChangeEvents(); + } + }; } - private static final class DelegatingShardCreator implements Creator { - private final Creator delegate; + @Test + public void testClusteredDataChangeListenerRegistration() throws Exception { + new ShardTestKit(getSystem()) { + { + final String testName = "testClusteredDataChangeListenerRegistration"; + final ShardIdentifier followerShardID = ShardIdentifier.create("inventory", + MemberName.forName(actorFactory.generateActorId(testName + "-follower")), "config"); + + final ShardIdentifier leaderShardID = ShardIdentifier.create("inventory", + MemberName.forName(actorFactory.generateActorId(testName + "-leader")), "config"); + + final TestActorRef followerShard = actorFactory + .createTestActor(Shard.builder().id(followerShardID) + .datastoreContext(dataStoreContextBuilder.shardElectionTimeoutFactor(1000).build()) + .peerAddresses(Collections.singletonMap(leaderShardID.toString(), + "akka://test/user/" + leaderShardID.toString())) + .schemaContext(SCHEMA_CONTEXT).props() + .withDispatcher(Dispatchers.DefaultDispatcherId()), followerShardID.toString()); + + final TestActorRef leaderShard = actorFactory + .createTestActor(Shard.builder().id(leaderShardID).datastoreContext(newDatastoreContext()) + .peerAddresses(Collections.singletonMap(followerShardID.toString(), + "akka://test/user/" + followerShardID.toString())) + 
.schemaContext(SCHEMA_CONTEXT).props()
+ .withDispatcher(Dispatchers.DefaultDispatcherId()), leaderShardID.toString());
+
+ leaderShard.tell(TimeoutNow.INSTANCE, ActorRef.noSender());
+ final String leaderPath = waitUntilLeader(followerShard);
+ assertEquals("Shard leader path", leaderShard.path().toString(), leaderPath);
+
+ final YangInstanceIdentifier path = TestModel.TEST_PATH;
+ final MockDataChangeListener listener = new MockDataChangeListener(1);
+ final ActorRef dclActor = actorFactory.createActor(DataChangeListener.props(listener),
+ actorFactory.generateActorId(testName + "-DataChangeListener"));
+
+ followerShard.tell(
+ new RegisterChangeListener(path, dclActor, AsyncDataBroker.DataChangeScope.BASE, true),
+ getRef());
+ final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+ RegisterChangeListenerReply.class);
+ assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath());
- DelegatingShardCreator(Creator delegate) {
- this.delegate = delegate;
- }
+ writeToStore(followerShard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- @Override
- public Shard create() throws Exception {
- return delegate.create();
- }
+ listener.waitForChangeEvents();
+ }
+ };
+ }
+
+ @Test
+ public void testClusteredDataTreeChangeListenerDelayedRegistration() throws Exception {
+ new ShardTestKit(getSystem()) {
+ {
+ final String testName = "testClusteredDataTreeChangeListenerDelayedRegistration";
+ dataStoreContextBuilder.shardElectionTimeoutFactor(1000)
+ .customRaftPolicyImplementation(DisableElectionsRaftPolicy.class.getName());
+
+ final MockDataTreeChangeListener listener = new MockDataTreeChangeListener(1);
+ final ActorRef dclActor = actorFactory.createActor(DataTreeChangeListenerActor.props(listener),
+ actorFactory.generateActorId(testName + "-DataTreeChangeListener"));
+
+ setupInMemorySnapshotStore();
+
+ final TestActorRef shard = actorFactory.createTestActor(
+ newShardBuilder().props().withDispatcher(Dispatchers.DefaultDispatcherId()),
+ actorFactory.generateActorId(testName + "-shard"));
+
+ waitUntilNoLeader(shard);
+
+ shard.tell(new RegisterDataTreeChangeListener(TestModel.TEST_PATH, dclActor, true), getRef());
+ final RegisterDataTreeChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+ RegisterDataTreeChangeListenerReply.class);
+ assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath());
+
+ shard.tell(DatastoreContext.newBuilderFrom(dataStoreContextBuilder.build())
+ .customRaftPolicyImplementation(null).build(), ActorRef.noSender());
+
+ listener.waitForChangeEvents();
+ }
+ };
+ }
+
+ @Test
+ public void testClusteredDataTreeChangeListenerRegistration() throws Exception {
+ new ShardTestKit(getSystem()) {
+ {
+ final String testName = "testClusteredDataTreeChangeListenerRegistration";
+ final ShardIdentifier followerShardID = ShardIdentifier.create("inventory",
+ MemberName.forName(actorFactory.generateActorId(testName + "-follower")), "config");
+
+ final ShardIdentifier leaderShardID = ShardIdentifier.create("inventory",
+ MemberName.forName(actorFactory.generateActorId(testName + "-leader")), "config");
+
+ final TestActorRef followerShard = actorFactory
+ .createTestActor(Shard.builder().id(followerShardID)
+ .datastoreContext(dataStoreContextBuilder.shardElectionTimeoutFactor(1000).build())
+ .peerAddresses(Collections.singletonMap(leaderShardID.toString(),
+ "akka://test/user/" + leaderShardID.toString()))
+ .schemaContext(SCHEMA_CONTEXT).props()
+ 
.withDispatcher(Dispatchers.DefaultDispatcherId()), followerShardID.toString()); + + final TestActorRef leaderShard = actorFactory + .createTestActor(Shard.builder().id(leaderShardID).datastoreContext(newDatastoreContext()) + .peerAddresses(Collections.singletonMap(followerShardID.toString(), + "akka://test/user/" + followerShardID.toString())) + .schemaContext(SCHEMA_CONTEXT).props() + .withDispatcher(Dispatchers.DefaultDispatcherId()), leaderShardID.toString()); + + leaderShard.tell(TimeoutNow.INSTANCE, ActorRef.noSender()); + final String leaderPath = waitUntilLeader(followerShard); + assertEquals("Shard leader path", leaderShard.path().toString(), leaderPath); + + final YangInstanceIdentifier path = TestModel.TEST_PATH; + final MockDataTreeChangeListener listener = new MockDataTreeChangeListener(1); + final ActorRef dclActor = actorFactory.createActor(DataTreeChangeListenerActor.props(listener), + actorFactory.generateActorId(testName + "-DataTreeChangeListener")); + + followerShard.tell(new RegisterDataTreeChangeListener(TestModel.TEST_PATH, dclActor, true), getRef()); + final RegisterDataTreeChangeListenerReply reply = expectMsgClass(duration("5 seconds"), + RegisterDataTreeChangeListenerReply.class); + assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath()); + + writeToStore(followerShard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME)); + + listener.waitForChangeEvents(); + } + }; + } + + @Test + public void testServerRemoved() throws Exception { + final TestActorRef parent = actorFactory.createTestActor(MessageCollectorActor.props()); + + final ActorRef shard = parent.underlyingActor().context().actorOf( + newShardBuilder().props().withDispatcher(Dispatchers.DefaultDispatcherId()), + "testServerRemoved"); + + shard.tell(new ServerRemoved("test"), ActorRef.noSender()); + + MessageCollectorActor.expectFirstMatching(parent, ServerRemoved.class); } }
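
The tests above all drive the shard through the same ready/canCommit/commit message exchange. The following minimal sketch (not part of the patch) shows that happy-path sequence in isolation; it assumes the test helpers used throughout this class (newShardProps(), nextTransactionId(), newBatchedModifications(...), ShardTestKit's waitUntilLeader/expectMsgClass) and uses a hypothetical test name.

    @Test
    public void testReadyCanCommitCommitSketch() throws Exception {
        new ShardTestKit(getSystem()) {
            {
                final TestActorRef<Shard> shard = actorFactory.createTestActor(
                        newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()),
                        "testReadyCanCommitCommitSketch");
                waitUntilLeader(shard);

                final FiniteDuration duration = duration("5 seconds");
                final TransactionIdentifier txId = nextTransactionId();

                // Ready: ship the modifications with the ready flag set in a single message.
                shard.tell(newBatchedModifications(txId, TestModel.TEST_PATH,
                        ImmutableNodes.containerNode(TestModel.TEST_QNAME), true, false, 1), getRef());
                expectMsgClass(duration, ReadyTransactionReply.class);

                // CanCommit: the shard validates the candidate and answers canCommit=true.
                shard.tell(new CanCommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef());
                final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply
                        .fromSerializable(expectMsgClass(duration, CanCommitTransactionReply.class));
                assertEquals("Can commit", true, canCommitReply.getCanCommit());

                // Commit: the candidate is applied to the shard's data tree.
                shard.tell(new CommitTransaction(txId, CURRENT_VERSION).toSerializable(), getRef());
                expectMsgClass(duration, CommitTransactionReply.class);

                // Once applied, the transaction no longer occupies the pending commit queue.
                assertEquals("getPendingTxCommitQueueSize", 0,
                        shard.underlyingActor().getPendingTxCommitQueueSize());
            }
        };
    }

Sending AbortTransaction instead of CommitTransaction at the final step yields AbortTransactionReply and removes the transaction from the pending queue, which is the variation exercised by testAbortAfterReady and testAbortQueuedTransaction above.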