import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.tree.InMemoryDataTreeFactory;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
"testRegisterChangeListener-DataChangeListener");
shard.tell(new RegisterChangeListener(TestModel.TEST_PATH,
- dclActor, AsyncDataBroker.DataChangeScope.BASE), getRef());
+ dclActor, AsyncDataBroker.DataChangeScope.BASE, true), getRef());
final RegisterChangeListenerReply reply = expectMsgClass(duration("3 seconds"),
RegisterChangeListenerReply.class);
// this will cause all other messages to not be queued properly after that.
// The basic issue is that you cannot use TestActorRef with a persistent actor (at least when
// it does do a persist)
- return new Shard(shardID, Collections.<String,String>emptyMap(),
- dataStoreContextBuilder.persistent(false).build(), SCHEMA_CONTEXT) {
+ return new Shard(newShardBuilder()) {
@Override
public void onReceiveCommand(final Object message) throws Exception {
if(message instanceof ElectionTimeout && firstElectionTimeout) {
"testRegisterChangeListenerWhenNotLeaderInitially-DataChangeListener");
final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
- Props.create(new DelegatingShardCreator(creator)),
+ Props.create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()),
"testRegisterChangeListenerWhenNotLeaderInitially");
// Write initial data into the in-memory store.
// Now send the RegisterChangeListener and wait for the reply.
shard.tell(new RegisterChangeListener(path, dclActor,
- AsyncDataBroker.DataChangeScope.SUBTREE), getRef());
+ AsyncDataBroker.DataChangeScope.SUBTREE, false), getRef());
final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
RegisterChangeListenerReply.class);
@Override
public Shard create() throws Exception {
- return new Shard(shardID, Collections.<String,String>emptyMap(),
- dataStoreContextBuilder.persistent(false).build(), SCHEMA_CONTEXT) {
+ return new Shard(Shard.builder().id(shardID).datastoreContext(
+ dataStoreContextBuilder.persistent(false).build()).schemaContext(SCHEMA_CONTEXT)) {
@Override
public void onReceiveCommand(final Object message) throws Exception {
if(message instanceof ElectionTimeout && firstElectionTimeout) {
"testDataTreeChangeListenerNotifiedWhenNotTheLeaderOnRegistration-DataChangeListener");
final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
- Props.create(new DelegatingShardCreator(creator)),
+ Props.create(new DelegatingShardCreator(creator)).withDispatcher(Dispatchers.DefaultDispatcherId()),
"testDataTreeChangeListenerNotifiedWhenNotTheLeaderOnRegistration");
final YangInstanceIdentifier path = TestModel.TEST_PATH;
writeToStore(shard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
assertEquals("Got first ElectionTimeout", true,
- onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
+ onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
shard.tell(new RegisterDataTreeChangeListener(path, dclActor), getRef());
final RegisterDataTreeChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
- RegisterDataTreeChangeListenerReply.class);
+ RegisterDataTreeChangeListenerReply.class);
assertNotNull("getListenerRegistratioznPath", reply.getListenerRegistrationPath());
shard.tell(new FindLeader(), getRef());
final CountDownLatch recoveryComplete = new CountDownLatch(1);
class TestShard extends Shard {
TestShard() {
- super(shardID, Collections.<String, String>singletonMap(shardID.toString(), null),
- newDatastoreContext(), SCHEMA_CONTEXT);
+ super(Shard.builder().id(shardID).datastoreContext(newDatastoreContext()).
+ peerAddresses(Collections.<String, String>singletonMap(shardID.toString(), null)).
+ schemaContext(SCHEMA_CONTEXT));
}
- Map<String, String> getPeerAddresses() {
- return getRaftActorContext().getPeerAddresses();
+ String getPeerAddress(String id) {
+ return getRaftActorContext().getPeerAddress(id);
}
@Override
}
})), "testPeerAddressResolved");
- //waitUntilLeader(shard);
assertEquals("Recovery complete", true,
- Uninterruptibles.awaitUninterruptibly(recoveryComplete, 5, TimeUnit.SECONDS));
+ Uninterruptibles.awaitUninterruptibly(recoveryComplete, 5, TimeUnit.SECONDS));
final String address = "akka://foobar";
shard.underlyingActor().onReceiveCommand(new PeerAddressResolved(shardID.toString(), address));
- assertEquals("getPeerAddresses", address,
- ((TestShard)shard.underlyingActor()).getPeerAddresses().get(shardID.toString()));
+ assertEquals("getPeerAddress", address,
+ ((TestShard) shard.underlyingActor()).getPeerAddress(shardID.toString()));
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}};
final NormalizedNode<?, ?> root = readStore(testStore, YangInstanceIdentifier.builder().build());
InMemorySnapshotStore.addSnapshot(shardID.toString(), Snapshot.create(
- SerializationUtils.serializeNormalizedNode(root),
- Collections.<ReplicatedLogEntry>emptyList(), 0, 1, -1, -1));
+ SerializationUtils.serializeNormalizedNode(root),
+ Collections.<ReplicatedLogEntry>emptyList(), 0, 1, -1, -1));
return testStore;
}
}
InMemoryJournal.addEntry(shardID.toString(), nListEntries + 2,
- new ApplyJournalEntries(nListEntries));
+ new ApplyJournalEntries(nListEntries));
testRecovery(listEntryKeys);
}
InMemoryJournal.addEntry(shardID.toString(), 0, DUMMY_DATA);
InMemoryJournal.addEntry(shardID.toString(), 1, new ReplicatedLogImplEntry(0, 1, newModificationPayload(
- new WriteModification(TestModel.OUTER_LIST_PATH,
- ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build()))));
+ new WriteModification(TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build()))));
final int nListEntries = 16;
final Set<Integer> listEntryKeys = new HashSet<>();
// Send a couple more BatchedModifications.
shard.tell(newBatchedModifications(transactionID, TestModel.OUTER_LIST_PATH,
- ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false, false, 2), getRef());
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(), false, false, 2), getRef());
expectMsgClass(duration, BatchedModificationsReply.class);
shard.tell(newBatchedModifications(transactionID, YangInstanceIdentifier.builder(
TestModel.OUTER_LIST_PATH).nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
- ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), true, true, 3), getRef());
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1), true, true, 3), getRef());
expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
@Override
public Shard create() throws Exception {
- return new Shard(shardID, Collections.<String,String>emptyMap(),
- newDatastoreContext(), SCHEMA_CONTEXT) {
+ return new Shard(newShardBuilder()) {
@Override
protected boolean isLeader() {
return overrideLeaderCalls.get() ? false : super.isLeader();
final ShardDataTree dataStore = shard.underlyingActor().getDataStore();
- final DataTreeModification modification = dataStore.getDataTree().takeSnapshot().newModification();
+ final DataTreeModification modification = dataStore.newModification();
final ContainerNode writeData = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
new WriteModification(TestModel.TEST_PATH, writeData).apply(modification);
final ShardDataTree dataStore = shard.underlyingActor().getDataStore();
- final DataTreeModification modification = dataStore.getDataTree().takeSnapshot().newModification();
+ final DataTreeModification modification = dataStore.newModification();
final ContainerNode writeData = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
new WriteModification(TestModel.TEST_PATH, writeData).apply(modification);
final MutableCompositeModification modification = new MutableCompositeModification();
final NormalizedNode<?, ?> containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
final ShardDataTreeCohort cohort = setupMockWriteTransaction("cohort", dataStore,
- TestModel.TEST_PATH, containerNode, modification);
+ TestModel.TEST_PATH, containerNode, modification);
final FiniteDuration duration = duration("5 seconds");
// by the ShardTransaction.
shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
- cohort, modification, true, false), getRef());
+ cohort, modification, true, false), getRef());
expectMsgClass(duration, ReadyTransactionReply.class);
// Send the CanCommitTransaction message.
shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
- expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
assertEquals("Can commit", true, canCommitReply.getCanCommit());
// Send the CanCommitTransaction message for the 2nd Tx. This should get queued and
doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit();
shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
- cohort, modification, true, false), getRef());
+ cohort, modification, true, false), getRef());
expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef());
modification, preCommit);
shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
- cohort, modification, true, false), getRef());
+ cohort, modification, true, false), getRef());
expectMsgClass(duration, ReadyTransactionReply.class);
shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
final CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
- expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
assertEquals("Can commit", true, canCommitReply.getCanCommit());
shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
- cohort1, modification1, true, false), getRef());
+ cohort1, modification1, true, false), getRef());
expectMsgClass(duration, ReadyTransactionReply.class);
final String transactionID2 = "tx2";
TestModel.TEST2_PATH, ImmutableNodes.containerNode(TestModel.TEST2_QNAME), modification3);
shard.tell(new ForwardedReadyTransaction(transactionID3, CURRENT_VERSION,
- cohort3, modification3, true, false), getRef());
+ cohort3, modification3, true, false), getRef());
expectMsgClass(duration, ReadyTransactionReply.class);
// All Tx's are readied. We'll send canCommit for the last one but not the others. The others
// Ready the third Tx.
final String transactionID3 = "tx3";
- final DataTreeModification modification3 = dataStore.getDataTree().takeSnapshot().newModification();
+ final DataTreeModification modification3 = dataStore.newModification();
new WriteModification(TestModel.TEST2_PATH, ImmutableNodes.containerNode(TestModel.TEST2_QNAME))
.apply(modification3);
modification3.ready();
final Creator<Shard> creator = new Creator<Shard>() {
@Override
public Shard create() throws Exception {
- return new Shard(shardID, Collections.<String,String>emptyMap(),
- dataStoreContextBuilder.build(), SCHEMA_CONTEXT) {
+ return new Shard(newShardBuilder()) {
@Override
public void onReceiveCommand(final Object message) throws Exception {
super.onReceiveCommand(message);
new ShardTestKit(getSystem()) {{
class TestShard extends Shard {
- protected TestShard(final ShardIdentifier name, final Map<String, String> peerAddresses,
- final DatastoreContext datastoreContext, final SchemaContext schemaContext) {
- super(name, peerAddresses, datastoreContext, schemaContext);
+ protected TestShard(AbstractBuilder<?, ?> builder) {
+ super(builder);
setPersistence(new TestPersistentDataProvider(super.persistence()));
}
final Creator<Shard> creator = new Creator<Shard>() {
@Override
public Shard create() throws Exception {
- return new TestShard(shardID, Collections.<String,String>emptyMap(),
- newDatastoreContext(), SCHEMA_CONTEXT);
+ return new TestShard(newShardBuilder());
}
};
Props.create(new DelegatingShardCreator(creator)), shardActorName);
waitUntilLeader(shard);
-
writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
final NormalizedNode<?,?> expectedRoot = readStore(shard, YangInstanceIdentifier.builder().build());
// Trigger creation of a snapshot by ensuring
final RaftActorContext raftActorContext = ((TestShard) shard.underlyingActor()).getRaftActorContext();
raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1);
-
- assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
-
- assertTrue("Invalid saved snapshot " + savedSnapshot.get(),
- savedSnapshot.get() instanceof Snapshot);
-
- verifySnapshot((Snapshot)savedSnapshot.get(), expectedRoot);
-
- latch.set(new CountDownLatch(1));
- savedSnapshot.set(null);
+ awaitAndValidateSnapshot(expectedRoot);
raftActorContext.getSnapshotManager().capture(mock(ReplicatedLogEntry.class), -1);
+ awaitAndValidateSnapshot(expectedRoot);
- assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
+ shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+ }
- assertTrue("Invalid saved snapshot " + savedSnapshot.get(),
- savedSnapshot.get() instanceof Snapshot);
+        // Waits for the snapshot capture to complete, validates the saved snapshot
+        // against the expected root node, then re-arms the latch/snapshot holders so
+        // the helper can be invoked again for a subsequent capture.
+        private void awaitAndValidateSnapshot(final NormalizedNode<?, ?> expectedRoot)
+                throws InterruptedException {
+            assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
-            verifySnapshot((Snapshot)savedSnapshot.get(), expectedRoot);
+            assertTrue("Invalid saved snapshot " + savedSnapshot.get(),
+                    savedSnapshot.get() instanceof Snapshot);
-            shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
-        }
+            verifySnapshot((Snapshot) savedSnapshot.get(), expectedRoot);
-        private void verifySnapshot(final Snapshot snapshot, final NormalizedNode<?,?> expectedRoot) {
+            // Re-arm for the next capture.
+            latch.set(new CountDownLatch(1));
+            savedSnapshot.set(null);
+        }
-            final NormalizedNode<?, ?> actual = SerializationUtils.deserializeNormalizedNode(snapshot.getState());
-            assertEquals("Root node", expectedRoot, actual);
+        // Deserializes the snapshot state and compares it with the expected root node.
+        private void verifySnapshot(final Snapshot snapshot, final NormalizedNode<?, ?> expectedRoot) {
-        }};
+            final NormalizedNode<?, ?> actual = SerializationUtils.deserializeNormalizedNode(snapshot.getState());
+            assertEquals("Root node", expectedRoot, actual);
+        }
+        };
}
/**
final DatastoreContext persistentContext = DatastoreContext.newBuilder().
shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(true).build();
- final Props persistentProps = Shard.props(shardID, Collections.<String, String>emptyMap(),
- persistentContext, SCHEMA_CONTEXT);
+ final Props persistentProps = Shard.builder().id(shardID).datastoreContext(persistentContext).
+ schemaContext(SCHEMA_CONTEXT).props();
final DatastoreContext nonPersistentContext = DatastoreContext.newBuilder().
shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(false).build();
- final Props nonPersistentProps = Shard.props(shardID, Collections.<String, String>emptyMap(),
- nonPersistentContext, SCHEMA_CONTEXT);
+ final Props nonPersistentProps = Shard.builder().id(shardID).datastoreContext(nonPersistentContext).
+ schemaContext(SCHEMA_CONTEXT).props();
new ShardTestKit(getSystem()) {{
final TestActorRef<Shard> shard1 = TestActorRef.create(getSystem(),
shard.tell(dataStoreContextBuilder.persistent(false).build(), ActorRef.noSender());
assertEquals("isRecoveryApplicable", false,
- shard.underlyingActor().persistence().isRecoveryApplicable());
+ shard.underlyingActor().persistence().isRecoveryApplicable());
shard.tell(dataStoreContextBuilder.persistent(true).build(), ActorRef.noSender());
assertEquals("isRecoveryApplicable", true,
- shard.underlyingActor().persistence().isRecoveryApplicable());
+ shard.underlyingActor().persistence().isRecoveryApplicable());
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}};
MessageCollectorActor.expectFirstMatching(listener, RegisterRoleChangeListenerReply.class);
ShardLeaderStateChanged leaderStateChanged = MessageCollectorActor.expectFirstMatching(listener,
- ShardLeaderStateChanged.class);
+ ShardLeaderStateChanged.class);
assertEquals("getLocalShardDataTree present", true,
leaderStateChanged.getLocalShardDataTree().isPresent());
assertSame("getLocalShardDataTree", shard.underlyingActor().getDataStore().getDataTree(),
- leaderStateChanged.getLocalShardDataTree().get());
+ leaderStateChanged.getLocalShardDataTree().get());
MessageCollectorActor.clearMessages(listener);
store.validate(modification);
store.commit(store.prepare(modification));
}
+
+    // NOTE(review): "Listerner" in the method name is a typo ("Listener"), kept to
+    // avoid churning the test name; consider renaming in a follow-up.
+    @Test
+    public void testClusteredDataChangeListernerDelayedRegistration() throws Exception {
+        new ShardTestKit(getSystem()) {{
+            dataStoreContextBuilder.persistent(false);
+            final CountDownLatch onFirstElectionTimeout = new CountDownLatch(1);
+            final CountDownLatch onChangeListenerRegistered = new CountDownLatch(1);
+            final Creator<Shard> creator = new Creator<Shard>() {
+                private static final long serialVersionUID = 1L;
+                boolean firstElectionTimeout = true;
+
+                @Override
+                public Shard create() throws Exception {
+                    return new Shard(newShardBuilder()) {
+                        @Override
+                        public void onReceiveCommand(final Object message) throws Exception {
+                            // Hold back the first ElectionTimeout until the change listener
+                            // has been registered, so registration happens while the shard
+                            // is not yet the leader.
+                            if(message instanceof ElectionTimeout && firstElectionTimeout) {
+                                firstElectionTimeout = false;
+                                final ActorRef self = getSelf();
+                                new Thread() {
+                                    @Override
+                                    public void run() {
+                                        Uninterruptibles.awaitUninterruptibly(
+                                                onChangeListenerRegistered, 5, TimeUnit.SECONDS);
+                                        self.tell(message, self);
+                                    }
+                                }.start();
+
+                                onFirstElectionTimeout.countDown();
+                            } else {
+                                super.onReceiveCommand(message);
+                            }
+                        }
+                    };
+                }
+            };
+
+            final MockDataChangeListener listener = new MockDataChangeListener(1);
+            // Actor names must be unique per test - reusing another test's names can
+            // fail with InvalidActorNameException since PoisonPill termination is async.
+            final ActorRef dclActor = getSystem().actorOf(DataChangeListener.props(listener),
+                    "testClusteredDataChangeListernerDelayedRegistration-DataChangeListener");
+
+            final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+                    Props.create(new DelegatingShardCreator(creator)).
+                        withDispatcher(Dispatchers.DefaultDispatcherId()),
+                    "testClusteredDataChangeListernerDelayedRegistration");
+
+            assertEquals("Got first ElectionTimeout", true,
+                    onFirstElectionTimeout.await(5, TimeUnit.SECONDS));
+
+            // The shard must not have a leader yet.
+            shard.tell(new FindLeader(), getRef());
+            final FindLeaderReply findLeaderReply =
+                    expectMsgClass(duration("5 seconds"), FindLeaderReply.class);
+            assertNull("Expected the shard not to be the leader", findLeaderReply.getLeaderActor());
+
+            final YangInstanceIdentifier path = TestModel.TEST_PATH;
+
+            // Register with the clustered (delayed-registration) flag set.
+            shard.tell(new RegisterChangeListener(path, dclActor, AsyncDataBroker.DataChangeScope.BASE, true), getRef());
+            final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+                    RegisterChangeListenerReply.class);
+            assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath());
+
+            writeToStore(shard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+
+            // Let the delayed election proceed; the listener should then get notified.
+            onChangeListenerRegistered.countDown();
+
+            listener.waitForChangeEvents();
+
+            dclActor.tell(PoisonPill.getInstance(), ActorRef.noSender());
+            shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+        }};
+    }
+
+    // NOTE(review): "Listerner" in the method name is a typo ("Listener"), kept to
+    // avoid churning the test name; consider renaming in a follow-up.
+    @Test
+    public void testClusteredDataChangeListernerRegistration() throws Exception {
+        dataStoreContextBuilder.persistent(false);
+        new ShardTestKit(getSystem()) {{
+            final ShardIdentifier member1ShardID = ShardIdentifier.builder().memberName("member-1")
+                    .shardName("inventory").type("config").build();
+
+            final ShardIdentifier member2ShardID = ShardIdentifier.builder().memberName("member-2")
+                    .shardName("inventory").type("config").build();
+
+            // member-1 is the follower: it swallows ElectionTimeout so it can never
+            // start an election and become the leader.
+            final Creator<Shard> followerShardCreator = new Creator<Shard>() {
+                private static final long serialVersionUID = 1L;
+
+                @Override
+                public Shard create() throws Exception {
+                    return new Shard(Shard.builder().id(member1ShardID).datastoreContext(newDatastoreContext()).
+                            peerAddresses(Collections.singletonMap(member2ShardID.toString(),
+                                    "akka://test/user/" + member2ShardID.toString())).schemaContext(SCHEMA_CONTEXT)) {
+                        @Override
+                        public void onReceiveCommand(final Object message) throws Exception {
+                            if(!(message instanceof ElectionTimeout)) {
+                                super.onReceiveCommand(message);
+                            }
+                        }
+                    };
+                }
+            };
+
+            // member-2 has no overrides and wins the election.
+            final Creator<Shard> leaderShardCreator = new Creator<Shard>() {
+                private static final long serialVersionUID = 1L;
+
+                @Override
+                public Shard create() throws Exception {
+                    return new Shard(Shard.builder().id(member2ShardID).datastoreContext(newDatastoreContext()).
+                            peerAddresses(Collections.singletonMap(member1ShardID.toString(),
+                                    "akka://test/user/" + member1ShardID.toString())).schemaContext(SCHEMA_CONTEXT)) {};
+                }
+            };
+
+            // Use the default dispatcher for both shards - TestActorRef's synchronous
+            // CallingThreadDispatcher doesn't mix with persistent actors (see other
+            // TestActorRef creations in this patch).
+            final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+                    Props.create(new DelegatingShardCreator(followerShardCreator)).
+                        withDispatcher(Dispatchers.DefaultDispatcherId()),
+                    member1ShardID.toString());
+
+            final TestActorRef<Shard> shardLeader = TestActorRef.create(getSystem(),
+                    Props.create(new DelegatingShardCreator(leaderShardCreator)).
+                        withDispatcher(Dispatchers.DefaultDispatcherId()),
+                    member2ShardID.toString());
+
+            // Sleep to let the election happen.
+            // TODO(review): a fixed sleep is prone to spurious failures on loaded
+            // machines - consider polling FindLeader until a leader is reported.
+            Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS);
+
+            // The follower should report member-2 as the leader.
+            shard.tell(new FindLeader(), getRef());
+            final FindLeaderReply findLeaderReply =
+                    expectMsgClass(duration("5 seconds"), FindLeaderReply.class);
+            assertEquals("Shard leader does not match", shardLeader.path().toString(),
+                    findLeaderReply.getLeaderActor());
+
+            final YangInstanceIdentifier path = TestModel.TEST_PATH;
+            final MockDataChangeListener listener = new MockDataChangeListener(1);
+            // Unique actor name per test to avoid InvalidActorNameException when
+            // another test's identically-named actor hasn't terminated yet.
+            final ActorRef dclActor = getSystem().actorOf(DataChangeListener.props(listener),
+                    "testClusteredDataChangeListernerRegistration-DataChangeListener");
+
+            // Registration on the follower should succeed and be notified on write.
+            shard.tell(new RegisterChangeListener(path, dclActor, AsyncDataBroker.DataChangeScope.BASE, true), getRef());
+            final RegisterChangeListenerReply reply = expectMsgClass(duration("5 seconds"),
+                    RegisterChangeListenerReply.class);
+            assertNotNull("getListenerRegistrationPath", reply.getListenerRegistrationPath());
+
+            writeToStore(shard, path, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+
+            listener.waitForChangeEvents();
+
+            dclActor.tell(PoisonPill.getInstance(), ActorRef.noSender());
+            shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+        }};
+    }
}