Use UnsignedLongSet instead of RangeSet in metadata
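Frontend metadata now tracks purged transactions with an
UnsignedLongSet rather than a Guava RangeSet<UnsignedLong>. Update the
remoting integration test to assert on the new representation: the
purged set is inspected via ranges()/size() and the inclusive
lower()/upper() bounds of its first range instead of asRanges().
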
[controller.git] / opendaylight / md-sal / sal-distributed-datastore / src / test / java / org / opendaylight / controller / cluster / datastore / DistributedDataStoreRemotingIntegrationTest.java
index e403bbc20a2ccc5837121d6e502eaa72d2b53c2d..da219faaf1cdd893fc67bf687145b7798849faee 100644 (file)
@@ -8,6 +8,7 @@
 package org.opendaylight.controller.cluster.datastore;
 
 import static org.awaitility.Awaitility.await;
+import static org.hamcrest.Matchers.equalTo;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -31,7 +32,6 @@ import akka.testkit.javadsl.TestKit;
 import com.google.common.base.Stopwatch;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Range;
 import com.google.common.primitives.UnsignedLong;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -44,9 +44,11 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Supplier;
 import org.junit.After;
@@ -67,6 +69,8 @@ import org.opendaylight.controller.cluster.databroker.ConcurrentDOMDataBroker;
 import org.opendaylight.controller.cluster.databroker.TestClientBackedDataStore;
 import org.opendaylight.controller.cluster.datastore.DatastoreContext.Builder;
 import org.opendaylight.controller.cluster.datastore.TestShard.RequestFrontendMetadata;
+import org.opendaylight.controller.cluster.datastore.TestShard.StartDropMessages;
+import org.opendaylight.controller.cluster.datastore.TestShard.StopDropMessages;
 import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
 import org.opendaylight.controller.cluster.datastore.exceptions.ShardLeaderNotRespondingException;
 import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
@@ -84,6 +88,7 @@ import org.opendaylight.controller.cluster.raft.base.messages.TimeoutNow;
 import org.opendaylight.controller.cluster.raft.client.messages.GetOnDemandRaftState;
 import org.opendaylight.controller.cluster.raft.client.messages.OnDemandRaftState;
 import org.opendaylight.controller.cluster.raft.client.messages.Shutdown;
+import org.opendaylight.controller.cluster.raft.messages.AppendEntries;
 import org.opendaylight.controller.cluster.raft.messages.RequestVote;
 import org.opendaylight.controller.cluster.raft.persisted.ApplyJournalEntries;
 import org.opendaylight.controller.cluster.raft.persisted.Snapshot;
@@ -108,15 +113,16 @@ import org.opendaylight.yangtools.yang.common.Uint64;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
 import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
 import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.api.schema.SystemMapNode;
+import org.opendaylight.yangtools.yang.data.api.schema.builder.CollectionNodeBuilder;
 import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTree;
 import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeConfiguration;
 import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeModification;
 import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.CollectionNodeBuilder;
 import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
 import org.opendaylight.yangtools.yang.data.impl.schema.tree.InMemoryDataTreeFactory;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import scala.concurrent.Await;
 import scala.concurrent.Future;
 import scala.concurrent.duration.FiniteDuration;
@@ -196,9 +202,9 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
             leaderDistributedDataStore.close();
         }
 
-        TestKit.shutdownActorSystem(leaderSystem);
-        TestKit.shutdownActorSystem(followerSystem);
-        TestKit.shutdownActorSystem(follower2System);
+        TestKit.shutdownActorSystem(leaderSystem, true);
+        TestKit.shutdownActorSystem(followerSystem, true);
+        TestKit.shutdownActorSystem(follower2System, true);
 
         InMemoryJournal.clear();
         InMemorySnapshotStore.clear();
@@ -214,12 +220,19 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
 
     private void initDatastores(final String type, final String moduleShardsConfig, final String[] shards)
             throws Exception {
-        leaderTestKit = new IntegrationTestKit(leaderSystem, leaderDatastoreContextBuilder, commitTimeout);
+        initDatastores(type, moduleShardsConfig, shards, leaderDatastoreContextBuilder,
+                followerDatastoreContextBuilder);
+    }
+
+    private void initDatastores(final String type, final String moduleShardsConfig, final String[] shards,
+            final DatastoreContext.Builder leaderBuilder, final DatastoreContext.Builder followerBuilder)
+                    throws Exception {
+        leaderTestKit = new IntegrationTestKit(leaderSystem, leaderBuilder, commitTimeout);
 
         leaderDistributedDataStore = leaderTestKit.setupAbstractDataStore(
                 testParameter, type, moduleShardsConfig, false, shards);
 
-        followerTestKit = new IntegrationTestKit(followerSystem, followerDatastoreContextBuilder, commitTimeout);
+        followerTestKit = new IntegrationTestKit(followerSystem, followerBuilder, commitTimeout);
         followerDistributedDataStore = followerTestKit.setupAbstractDataStore(
                 testParameter, type, moduleShardsConfig, false, shards);
 
@@ -231,12 +244,12 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
 
     private static void verifyCars(final DOMStoreReadTransaction readTx, final MapEntryNode... entries)
             throws Exception {
-        final Optional<NormalizedNode<?, ?>> optional = readTx.read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS);
+        final Optional<NormalizedNode> optional = readTx.read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", optional.isPresent());
 
-        final CollectionNodeBuilder<MapEntryNode, MapNode> listBuilder = ImmutableNodes.mapNodeBuilder(
+        final CollectionNodeBuilder<MapEntryNode, SystemMapNode> listBuilder = ImmutableNodes.mapNodeBuilder(
                 CarsModel.CAR_QNAME);
-        for (final NormalizedNode<?, ?> entry: entries) {
+        for (final NormalizedNode entry: entries) {
             listBuilder.withChild((MapEntryNode) entry);
         }
 
@@ -244,8 +257,8 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
     }
 
     private static void verifyNode(final DOMStoreReadTransaction readTx, final YangInstanceIdentifier path,
-            final NormalizedNode<?, ?> expNode) throws Exception {
-        final Optional<NormalizedNode<?, ?>> optional = readTx.read(path).get(5, TimeUnit.SECONDS);
+            final NormalizedNode expNode) throws Exception {
+        final Optional<NormalizedNode> optional = readTx.read(path).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", optional.isPresent());
         assertEquals("Data node", expNode, optional.get());
     }
@@ -387,18 +400,22 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
                         }
 
                         assertEquals(0, metadata.getClosedTransactions().size());
-                        assertEquals(Range.closedOpen(UnsignedLong.valueOf(0), UnsignedLong.valueOf(11)),
-                                metadata.getPurgedTransactions().asRanges().iterator().next());
+
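+                        // UnsignedLongSet ranges use inclusive bounds, so [0, 10] here matches the former
+                        // RangeSet assertion of Range.closedOpen(0, 11)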
+                        final var purgedRanges = metadata.getPurgedTransactions().ranges();
+                        assertEquals(1, purgedRanges.size());
+                        final var purgedRange = purgedRanges.first();
+                        assertEquals(UnsignedLong.ZERO, purgedRange.lower());
+                        assertEquals(UnsignedLong.valueOf(10), purgedRange.upper());
                     } else {
                         // ask based should track no metadata
                         assertTrue(frontendMetadata.getClients().get(0).getCurrentHistories().isEmpty());
                     }
                 });
 
-        final Optional<NormalizedNode<?, ?>> optional = txChain.newReadOnlyTransaction()
+        final Optional<NormalizedNode> optional = txChain.newReadOnlyTransaction()
                 .read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", optional.isPresent());
-        assertEquals("# cars", numCars, ((Collection<?>) optional.get().getValue()).size());
+        assertEquals("# cars", numCars, ((Collection<?>) optional.get().body()).size());
     }
 
     @Test
@@ -451,20 +468,18 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
                             metadata = iterator.next();
                         }
 
-                        Set<Range<UnsignedLong>> ranges = metadata.getPurgedTransactions().asRanges();
-
                         assertEquals(0, metadata.getClosedTransactions().size());
-                        assertEquals(1, ranges.size());
+                        assertEquals(1, metadata.getPurgedTransactions().size());
                     } else {
                         // ask based should track no metadata
                         assertTrue(frontendMetadata.getClients().get(0).getCurrentHistories().isEmpty());
                     }
                 });
 
-        final Optional<NormalizedNode<?, ?>> optional = txChain.newReadOnlyTransaction()
+        final Optional<NormalizedNode> optional = txChain.newReadOnlyTransaction()
                 .read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", optional.isPresent());
-        assertEquals("# cars", numCars, ((Collection<?>) optional.get().getValue()).size());
+        assertEquals("# cars", numCars, ((Collection<?>) optional.get().body()).size());
     }
 
     @Test
@@ -501,11 +516,11 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
         assertNotNull("newWriteOnlyTransaction returned null", writeTx);
 
         final YangInstanceIdentifier carsPath = CarsModel.BASE_PATH;
-        final NormalizedNode<?, ?> carsNode = CarsModel.emptyContainer();
+        final NormalizedNode carsNode = CarsModel.emptyContainer();
         writeTx.write(carsPath, carsNode);
 
         final YangInstanceIdentifier peoplePath = PeopleModel.BASE_PATH;
-        final NormalizedNode<?, ?> peopleNode = PeopleModel.emptyContainer();
+        final NormalizedNode peopleNode = PeopleModel.emptyContainer();
         writeTx.write(peoplePath, peopleNode);
 
         followerTestKit.doCommit(writeTx.ready());
@@ -524,11 +539,11 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
         assertNotNull("newReadWriteTransaction returned null", rwTx);
 
         final YangInstanceIdentifier carsPath = CarsModel.BASE_PATH;
-        final NormalizedNode<?, ?> carsNode = CarsModel.emptyContainer();
+        final NormalizedNode carsNode = CarsModel.emptyContainer();
         rwTx.write(carsPath, carsNode);
 
         final YangInstanceIdentifier peoplePath = PeopleModel.BASE_PATH;
-        final NormalizedNode<?, ?> peopleNode = PeopleModel.emptyContainer();
+        final NormalizedNode peopleNode = PeopleModel.emptyContainer();
         rwTx.write(peoplePath, peopleNode);
 
         followerTestKit.doCommit(rwTx.ready());
@@ -615,7 +630,7 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
         final YangInstanceIdentifier personPath = PeopleModel.newPersonPath("jack");
         readWriteTx.merge(personPath, person);
 
-        Optional<NormalizedNode<?, ?>> optional = readWriteTx.read(carPath).get(5, TimeUnit.SECONDS);
+        Optional<NormalizedNode> optional = readWriteTx.read(carPath).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", optional.isPresent());
         assertEquals("Data node", car, optional.get());
 
@@ -950,7 +965,7 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
         cars.add(CarsModel.newCarEntry("car" + carIndex, Uint64.valueOf(carIndex)));
         writeTx2.write(CarsModel.newCarPath("car" + carIndex), cars.getLast());
         carIndex++;
-        NormalizedNode<?, ?> people = ImmutableNodes.mapNodeBuilder(PeopleModel.PERSON_QNAME)
+        NormalizedNode people = ImmutableNodes.mapNodeBuilder(PeopleModel.PERSON_QNAME)
                 .withChild(PeopleModel.newPersonEntry("Dude")).build();
         writeTx2.write(PeopleModel.PERSON_LIST_PATH, people);
         final DOMStoreThreePhaseCommitCohort writeTx2Cohort = writeTx2.ready();
@@ -1271,10 +1286,11 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
         followerTestKit.waitForMembersUp("member-1", "member-3");
         follower2TestKit.waitForMembersUp("member-1", "member-2");
 
-        TestKit.shutdownActorSystem(follower2System);
+        // behavior is controlled by akka.coordinated-shutdown.run-by-actor-system-terminate configuration option
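+        // e.g. (illustrative sketch only, the value is not asserted by this test):
+        //   akka.coordinated-shutdown.run-by-actor-system-terminate = on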
+        TestKit.shutdownActorSystem(follower2System, true);
 
         ActorRef cars = leaderDistributedDataStore.getActorUtils().findLocalShard("cars").get();
-        OnDemandRaftState initialState = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
+        final OnDemandRaftState initialState = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
                 .executeOperation(cars, GetOnDemandRaftState.INSTANCE);
 
         Cluster leaderCluster = Cluster.get(leaderSystem);
@@ -1324,7 +1340,7 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
                 CarsModel.newCarsMapNode(CarsModel.newCarEntry("optima", Uint64.valueOf(20000))));
         AbstractShardTest.writeToStore(tree, CarsModel.BASE_PATH, carsNode);
 
-        final NormalizedNode<?, ?> snapshotRoot = AbstractShardTest.readStore(tree, YangInstanceIdentifier.empty());
+        final NormalizedNode snapshotRoot = AbstractShardTest.readStore(tree, YangInstanceIdentifier.empty());
         final Snapshot initialSnapshot = Snapshot.create(
                 new ShardSnapshotState(new MetadataShardDataTreeSnapshot(snapshotRoot)),
                 Collections.emptyList(), 5, 1, 5, 1, 1, null, null);
@@ -1335,7 +1351,7 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
 
         initDatastoresWithCars(testName);
 
-        final Optional<NormalizedNode<?, ?>> readOptional = leaderDistributedDataStore.newReadOnlyTransaction().read(
+        final Optional<NormalizedNode> readOptional = leaderDistributedDataStore.newReadOnlyTransaction().read(
                 CarsModel.BASE_PATH).get(5, TimeUnit.SECONDS);
         assertTrue("isPresent", readOptional.isPresent());
         assertEquals("Node", carsNode, readOptional.get());
@@ -1358,14 +1374,145 @@ public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
 
         final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
 
-        final NormalizedNode<?, ?> carsNode = CarsModel.create();
+        final NormalizedNode carsNode = CarsModel.create();
         rwTx.write(CarsModel.BASE_PATH, carsNode);
 
         verifyNode(rwTx, CarsModel.BASE_PATH, carsNode);
     }
 
+    @SuppressWarnings("IllegalCatch")
+    @Test
+    public void testRaftCallbackDuringLeadershipDrop() throws Exception {
+        final String testName = "testRaftCallbackDuringLeadershipDrop";
+        initDatastores(testName, MODULE_SHARDS_CARS_1_2_3, CARS);
+
+        final ExecutorService executor = Executors.newSingleThreadExecutor();
+
+        final IntegrationTestKit follower2TestKit = new IntegrationTestKit(follower2System,
+                DatastoreContext.newBuilderFrom(followerDatastoreContextBuilder.build()).operationTimeoutInMillis(500)
+                        .shardLeaderElectionTimeoutInSeconds(3600),
+                commitTimeout);
+
+        final DOMStoreWriteTransaction initialWriteTx = leaderDistributedDataStore.newWriteOnlyTransaction();
+        initialWriteTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
+        leaderTestKit.doCommit(initialWriteTx.ready());
+
+        try (AbstractDataStore follower2DistributedDataStore = follower2TestKit.setupAbstractDataStore(
+                testParameter, testName, MODULE_SHARDS_CARS_1_2_3, false)) {
+
+            final ActorRef member3Cars = ((LocalShardStore) follower2DistributedDataStore).getLocalShards()
+                    .getLocalShards().get("cars").getActor();
+            final ActorRef member2Cars = ((LocalShardStore) followerDistributedDataStore).getLocalShards()
+                    .getLocalShards().get("cars").getActor();
+            member2Cars.tell(new StartDropMessages(AppendEntries.class), null);
+            member3Cars.tell(new StartDropMessages(AppendEntries.class), null);
+
+            final DOMStoreWriteTransaction newTx = leaderDistributedDataStore.newWriteOnlyTransaction();
+            newTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
+            final AtomicBoolean submitDone = new AtomicBoolean(false);
+            executor.submit(() -> {
+                try {
+                    leaderTestKit.doCommit(newTx.ready());
+                    submitDone.set(true);
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+            });
+            final ActorRef leaderCars = ((LocalShardStore) leaderDistributedDataStore).getLocalShards()
+                    .getLocalShards().get("cars").getActor();
+            await().atMost(10, TimeUnit.SECONDS)
+                    .until(() -> ((OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
+                            .executeOperation(leaderCars, GetOnDemandRaftState.INSTANCE)).getLastIndex() >= 1);
+
+            final OnDemandRaftState raftState = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
+                    .executeOperation(leaderCars, GetOnDemandRaftState.INSTANCE);
+
+            // Simulate a follower not receiving heartbeats but still being able to send messages, i.e. a RequestVote
+            // with a new term (switching to candidate after an election timeout)
+            leaderCars.tell(new RequestVote(raftState.getCurrentTerm() + 1,
+                    "member-3-shard-cars-testRaftCallbackDuringLeadershipDrop", -1, -1), member3Cars);
+
+            member2Cars.tell(new StopDropMessages(AppendEntries.class), null);
+            member3Cars.tell(new StopDropMessages(AppendEntries.class), null);
+
+            await("Is tx stuck in COMMIT_PENDING")
+                    .atMost(10, TimeUnit.SECONDS).untilAtomic(submitDone, equalTo(true));
+
+        }
+
+        executor.shutdownNow();
+    }
+
+    @Test
+    public void testSnapshotOnRootOverwrite() throws Exception {
+        if (!DistributedDataStore.class.isAssignableFrom(testParameter)) {
+            // FIXME: ClientBackedDatastore does not have stable indexes/term, the snapshot index seems to fluctuate
+            return;
+        }
+
+        final String testName = "testSnapshotOnRootOverwrite";
+        final String[] shards = {"cars", "default"};
+        initDatastores(testName, "module-shards-default-cars-member1-and-2.conf", shards,
+                leaderDatastoreContextBuilder.snapshotOnRootOverwrite(true),
+                followerDatastoreContextBuilder.snapshotOnRootOverwrite(true));
+
+        leaderTestKit.waitForMembersUp("member-2");
+        final ContainerNode rootNode = ImmutableContainerNodeBuilder.create()
+                .withNodeIdentifier(YangInstanceIdentifier.NodeIdentifier.create(SchemaContext.NAME))
+                .withChild(CarsModel.create())
+                .build();
+
+        leaderTestKit.testWriteTransaction(leaderDistributedDataStore, YangInstanceIdentifier.empty(), rootNode);
+
+        IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
+            state -> assertEquals(1, state.getSnapshotIndex()));
+
+        IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
+            state -> assertEquals(1, state.getSnapshotIndex()));
+
+        verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 1);
+        verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 1);
+
+        for (int i = 0; i < 10; i++) {
+            leaderTestKit.testWriteTransaction(leaderDistributedDataStore, CarsModel.newCarPath("car " + i),
+                    CarsModel.newCarEntry("car " + i, Uint64.ONE));
+        }
+
+        // fake snapshot causes the snapshotIndex to move
+        IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
+            state -> assertEquals(10, state.getSnapshotIndex()));
+        IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
+            state -> assertEquals(10, state.getSnapshotIndex()));
+
+        // however the real snapshot still has not changed and was taken at index 1
+        verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 1);
+        verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 1);
+
+        // root overwrite so expect a snapshot
+        leaderTestKit.testWriteTransaction(leaderDistributedDataStore, YangInstanceIdentifier.empty(), rootNode);
+
+        // this was a real snapshot so everything should be in it: 1 (DisableTrackingPayload) + 1 + 10 + 1 = 13
+        // entries, hence snapshot index 12
+        IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
+            state -> assertEquals(12, state.getSnapshotIndex()));
+        IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
+            state -> assertEquals(12, state.getSnapshotIndex()));
+
+        verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 12);
+        verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 12);
+    }
+
+    private void verifySnapshot(final String persistenceId, final long lastAppliedIndex) {
+        await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> {
+            final List<Snapshot> snap = InMemorySnapshotStore.getSnapshots(persistenceId, Snapshot.class);
+            assertEquals(1, snap.size());
+            assertEquals(lastAppliedIndex, snap.get(0).getLastAppliedIndex());
+        });
+    }
+
     private static void verifySnapshot(final Snapshot actual, final Snapshot expected,
-                                       final NormalizedNode<?, ?> expRoot) {
+                                       final NormalizedNode expRoot) {
         assertEquals("Snapshot getLastAppliedTerm", expected.getLastAppliedTerm(), actual.getLastAppliedTerm());
         assertEquals("Snapshot getLastAppliedIndex", expected.getLastAppliedIndex(), actual.getLastAppliedIndex());
         assertEquals("Snapshot getLastTerm", expected.getLastTerm(), actual.getLastTerm());