<type>xml</type>
<scope>runtime</scope>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>features-netconf-connector</artifactId>
+ <version>${mdsal.version}</version>
+ <classifier>features</classifier>
+ <type>xml</type>
+ <scope>runtime</scope>
+ </dependency>
<!-- JMH Benchmark dependencies -->
<dependency>
<groupId>org.openjdk.jmh</groupId>
the actual service-type which is actually required.";
mandatory true;
- type service-type-ref;
+ type leafref {
+ path "/config:services/config:service/config:type";
+ }
}
leaf name {
@GuardedBy("this")
private final Set<ObjectName> registeredObjectNames = new HashSet<>();
+ @GuardedBy("this")
private final List<InternalJMXRegistrator> children = new ArrayList<>();
public synchronized InternalJMXRegistration registerMBean(Object object,
}
}
- public InternalJMXRegistrator createChild() {
- InternalJMXRegistrator child = new InternalJMXRegistrator(
- configMBeanServer);
+ public synchronized InternalJMXRegistrator createChild() {
+ InternalJMXRegistrator child = new InternalJMXRegistrator(configMBeanServer);
children.add(child);
return child;
}
return getSameNames(result);
}
- private Set<ObjectName> getSameNames(Set<ObjectName> superSet) {
+ private synchronized Set<ObjectName> getSameNames(Set<ObjectName> superSet) {
Set<ObjectName> result = new HashSet<>(superSet);
result.retainAll(registeredObjectNames);
for (InternalJMXRegistrator child : children) {
public class ImmediateEventExecutorModuleTest extends AbstractConfigTest {
- private GlobalEventExecutorModuleFactory factory;
+ private ImmediateEventExecutorModuleFactory factory;
private final String instanceName = ImmediateEventExecutorModuleFactory.SINGLETON_NAME;
@Before
public void setUp() {
- factory = new GlobalEventExecutorModuleFactory();
+ factory = new ImmediateEventExecutorModuleFactory();
super.initConfigTransactionManagerImpl(new HardcodedModuleFactoriesResolver(mockedContext,factory));
}
<type>xml</type>
<scope>runtime</scope>
</dependency>
+ <!-- Netconf connector features. When this is included, users can test the netconf connector using netconf-testtool -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>features-netconf-connector</artifactId>
+ <classifier>features</classifier>
+ <type>xml</type>
+ </dependency>
+
</dependencies>
<build>
*/
package org.opendaylight.controller.sal.compatibility;
-import java.util.Dictionary;
-import java.util.Hashtable;
-
+import com.google.common.base.Preconditions;
import org.apache.felix.dm.Component;
import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.osgi.framework.BundleContext;
-import com.google.common.base.Preconditions;
+import java.util.Dictionary;
+import java.util.Hashtable;
public class ComponentActivator extends ComponentActivatorAbstractBase {
private final INodeConnectorFactory nodeConnectorFactory = new MDSalNodeConnectorFactory();
@Override
public void start(final BundleContext context) {
- super.start(context);
this.context = Preconditions.checkNotNull(context);
+ super.start(context);
}
public ProviderContext setBroker(final BindingAwareBroker broker) {
@Override
protected Object[] getGlobalImplementations() {
return new Object[] {
+ this, // Used for setBroker callback
flow,
inventory,
dataPacket,
nodeFactory,
nodeConnectorFactory,
topology,
- tpProvider,
- this // Used for setBroker callback
+ tpProvider
};
}
.setService(IDiscoveryService.class)
.setCallbacks("setDiscoveryPublisher", "setDiscoveryPublisher")
.setRequired(false));
+ it.add(createServiceDependency()
+ .setService(BindingAwareBroker.class)
+ .setRequired(true));
}
private void _instanceConfigure(final InventoryAndReadAdapter imp, final Component it, String containerName) {
.setService(IPluginOutInventoryService.class)
.setCallbacks("setInventoryPublisher", "unsetInventoryPublisher")
.setRequired(false));
+ it.add(createServiceDependency()
+ .setService(BindingAwareBroker.class)
+ .setRequired(true));
}
private void _configure(final TopologyAdapter imp, final Component it) {
--- /dev/null
+module node-config {
+ namespace "urn:opendaylight:module:config";
+ prefix node-config;
+
+ import flow-capable-transaction {prefix tr;}
+ import opendaylight-inventory {prefix inv;revision-date "2013-08-19";}
+
+ revision "2014-10-15" {
+ description "Initial revision of node configuration service";
+ }
+
+ grouping node-ref {
+ uses "inv:node-context-ref";
+ }
+
+
+
+ /** Base configuration structure **/
+ grouping node-config {
+ leaf flag {
+ type string;
+ description "Switch config flag. Expected values FRAGNORMAL, OFPCFRAGDROP, OFPCFRAGREASM, OFPCFRAGMASK";
+ }
+ leaf miss-search-length{
+ type uint16;
+ }
+ }
+
+ rpc set-config {
+ input {
+ uses node-config;
+ uses tr:transaction-aware;
+ uses node-ref;
+ }
+ output {
+ uses tr:transaction-aware;
+ }
+ }
+}
public class ListProcessingAndOrderingTest extends AbstractDataServiceTest {
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_UNORDERED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(UnorderedContainer.QNAME).node(UnorderedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(UnorderedContainer.QNAME).node(UnorderedList.QNAME).build();
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_ORDERED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(OrderedContainer.QNAME).node(OrderedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(OrderedContainer.QNAME).node(OrderedList.QNAME).build();
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_UNKEYED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(UnkeyedContainer.QNAME).node(UnkeyedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(UnkeyedContainer.QNAME).node(UnkeyedList.QNAME).build();
private static final InstanceIdentifier<UnorderedContainer> UNORDERED_CONTAINER_PATH = InstanceIdentifier.builder(Lists.class).child(UnorderedContainer.class).build();
private static final InstanceIdentifier<OrderedContainer> ORDERED_CONTAINER_PATH = InstanceIdentifier.builder(Lists.class).child(OrderedContainer.class).build();
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.common.actor;
+
+/**
+ * Actor with its behaviour metered. Metering is enabled by configuration.
+ */
+public abstract class AbstractUntypedPersistentActorWithMetering extends AbstractUntypedPersistentActor {
+
+ public AbstractUntypedPersistentActorWithMetering() {
+ if (isMetricsCaptureEnabled())
+ getContext().become(new MeteringBehavior(this));
+ }
+
+ private boolean isMetricsCaptureEnabled(){
+ CommonConfig config = new CommonConfig(getContext().system().settings().config());
+ return config.isMetricCaptureEnabled();
+ }
+}
.create(name.trim()));
}
- return new YangInstanceIdentifier.AugmentationIdentifier(null, childNames);
+ return new YangInstanceIdentifier.AugmentationIdentifier(childNames);
}
}
}
public interface CanCommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CanCommitTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CanCommitTransaction)
}
public interface AbortTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.AbortTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.AbortTransaction)
}
public interface CommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CommitTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CommitTransaction)
}
static {
java.lang.String[] descriptorData = {
"\n\014Cohort.proto\022!org.opendaylight.control" +
- "ler.mdsal\"\026\n\024CanCommitTransaction\".\n\031Can" +
- "CommitTransactionReply\022\021\n\tcanCommit\030\001 \002(" +
- "\010\"\022\n\020AbortTransaction\"\027\n\025AbortTransactio" +
- "nReply\"\023\n\021CommitTransaction\"\030\n\026CommitTra" +
- "nsactionReply\"\026\n\024PreCommitTransaction\"\033\n" +
- "\031PreCommitTransactionReplyBZ\n8org.openda" +
- "ylight.controller.protobuff.messages.coh" +
- "ort3pcB\036ThreePhaseCommitCohortMessages"
+ "ler.mdsal\"-\n\024CanCommitTransaction\022\025\n\rtra" +
+ "nsactionId\030\001 \002(\t\".\n\031CanCommitTransaction" +
+ "Reply\022\021\n\tcanCommit\030\001 \002(\010\")\n\020AbortTransac" +
+ "tion\022\025\n\rtransactionId\030\001 \002(\t\"\027\n\025AbortTran" +
+ "sactionReply\"*\n\021CommitTransaction\022\025\n\rtra" +
+ "nsactionId\030\001 \002(\t\"\030\n\026CommitTransactionRep" +
+ "ly\"\026\n\024PreCommitTransaction\"\033\n\031PreCommitT" +
+ "ransactionReplyBZ\n8org.opendaylight.cont" +
+ "roller.protobuff.messages.cohort3pcB\036Thr",
+ "eePhaseCommitCohortMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
internal_static_org_opendaylight_controller_mdsal_CanCommitTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CanCommitTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_CanCommitTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_org_opendaylight_controller_mdsal_CanCommitTransactionReply_fieldAccessorTable = new
internal_static_org_opendaylight_controller_mdsal_AbortTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_AbortTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_AbortTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_org_opendaylight_controller_mdsal_AbortTransactionReply_fieldAccessorTable = new
internal_static_org_opendaylight_controller_mdsal_CommitTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CommitTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_CommitTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_org_opendaylight_controller_mdsal_CommitTransactionReply_fieldAccessorTable = new
// @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CloseTransactionChainReply)
}
- public interface CreateTransactionChainOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChain}
- */
- public static final class CreateTransactionChain extends
- com.google.protobuf.GeneratedMessage
- implements CreateTransactionChainOrBuilder {
- // Use CreateTransactionChain.newBuilder() to construct.
- private CreateTransactionChain(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private CreateTransactionChain(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final CreateTransactionChain defaultInstance;
- public static CreateTransactionChain getDefaultInstance() {
- return defaultInstance;
- }
-
- public CreateTransactionChain getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private CreateTransactionChain(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.Builder.class);
- }
-
- public static com.google.protobuf.Parser<CreateTransactionChain> PARSER =
- new com.google.protobuf.AbstractParser<CreateTransactionChain>() {
- public CreateTransactionChain parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new CreateTransactionChain(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<CreateTransactionChain> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChain}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.Builder.class);
- }
-
- // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain getDefaultInstanceForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.getDefaultInstance();
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain build() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain buildPartial() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain(this);
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain) {
- return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain other) {
- if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransactionChain)
- }
-
- static {
- defaultInstance = new CreateTransactionChain(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CreateTransactionChain)
- }
-
- public interface CreateTransactionChainReplyOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string transactionChainPath = 1;
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- boolean hasTransactionChainPath();
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- java.lang.String getTransactionChainPath();
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- com.google.protobuf.ByteString
- getTransactionChainPathBytes();
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChainReply}
- */
- public static final class CreateTransactionChainReply extends
- com.google.protobuf.GeneratedMessage
- implements CreateTransactionChainReplyOrBuilder {
- // Use CreateTransactionChainReply.newBuilder() to construct.
- private CreateTransactionChainReply(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private CreateTransactionChainReply(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final CreateTransactionChainReply defaultInstance;
- public static CreateTransactionChainReply getDefaultInstance() {
- return defaultInstance;
- }
-
- public CreateTransactionChainReply getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private CreateTransactionChainReply(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- transactionChainPath_ = input.readBytes();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.Builder.class);
- }
-
- public static com.google.protobuf.Parser<CreateTransactionChainReply> PARSER =
- new com.google.protobuf.AbstractParser<CreateTransactionChainReply>() {
- public CreateTransactionChainReply parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new CreateTransactionChainReply(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<CreateTransactionChainReply> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // required string transactionChainPath = 1;
- public static final int TRANSACTIONCHAINPATH_FIELD_NUMBER = 1;
- private java.lang.Object transactionChainPath_;
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public boolean hasTransactionChainPath() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public java.lang.String getTransactionChainPath() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- transactionChainPath_ = s;
- }
- return s;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public com.google.protobuf.ByteString
- getTransactionChainPathBytes() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- transactionChainPath_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- transactionChainPath_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasTransactionChainPath()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getTransactionChainPathBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getTransactionChainPathBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChainReply}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReplyOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.Builder.class);
- }
-
- // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- transactionChainPath_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply getDefaultInstanceForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.getDefaultInstance();
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply build() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply buildPartial() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.transactionChainPath_ = transactionChainPath_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply) {
- return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply other) {
- if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.getDefaultInstance()) return this;
- if (other.hasTransactionChainPath()) {
- bitField0_ |= 0x00000001;
- transactionChainPath_ = other.transactionChainPath_;
- onChanged();
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasTransactionChainPath()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // required string transactionChainPath = 1;
- private java.lang.Object transactionChainPath_ = "";
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public boolean hasTransactionChainPath() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public java.lang.String getTransactionChainPath() {
- java.lang.Object ref = transactionChainPath_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- transactionChainPath_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public com.google.protobuf.ByteString
- getTransactionChainPathBytes() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- transactionChainPath_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder setTransactionChainPath(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- transactionChainPath_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder clearTransactionChainPath() {
- bitField0_ = (bitField0_ & ~0x00000001);
- transactionChainPath_ = getDefaultInstance().getTransactionChainPath();
- onChanged();
- return this;
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder setTransactionChainPathBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- transactionChainPath_ = value;
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransactionChainReply)
- }
-
- static {
- defaultInstance = new CreateTransactionChainReply(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CreateTransactionChainReply)
- }
-
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChain_descriptor;
private static
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChainReply_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
"\n\033ShardTransactionChain.proto\022!org.opend" +
"aylight.controller.mdsal\"3\n\025CloseTransac" +
"tionChain\022\032\n\022transactionChainId\030\001 \001(\t\"\034\n" +
- "\032CloseTransactionChainReply\"\030\n\026CreateTra" +
- "nsactionChain\";\n\033CreateTransactionChainR" +
- "eply\022\034\n\024transactionChainPath\030\001 \002(\tB[\n:or" +
- "g.opendaylight.controller.protobuff.mess" +
- "ages.transactionB\035ShardTransactionChainM" +
- "essages"
+ "\032CloseTransactionChainReplyB[\n:org.opend" +
+ "aylight.controller.protobuff.messages.tr" +
+ "ansactionB\035ShardTransactionChainMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChainReply_descriptor,
new java.lang.String[] { });
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor =
- getDescriptor().getMessageTypes().get(2);
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor,
- new java.lang.String[] { });
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor =
- getDescriptor().getMessageTypes().get(3);
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor,
- new java.lang.String[] { "TransactionChainPath", });
return null;
}
};
message CanCommitTransaction{
-
+ required string transactionId = 1;
}
message CanCommitTransactionReply{
}
message AbortTransaction{
-
+ required string transactionId = 1;
}
message AbortTransactionReply {
}
message CommitTransaction{
-
+ required string transactionId = 1;
}
message CommitTransactionReply{
package org.opendaylight.controller.protobuff.messages.transaction;
-import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.protobuff.messages.AbstractMessagesTest;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.common.QName;
/**
* This test case is present to ensure that if others have used proper version of protocol buffer
@Override
@Test
public void verifySerialization() throws Exception {
- String testTransactionChainPath =
- "/actor/path";
-
- ShardTransactionChainMessages.CreateTransactionChainReply.Builder builder =
- ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder();
- builder.setTransactionChainPath(testTransactionChainPath);
-
- writeToFile((com.google.protobuf.GeneratedMessage.Builder<?>) builder);
-
- // Here we will read the same and check we got back what we had saved
- ShardTransactionChainMessages.CreateTransactionChainReply replyNew =
- (ShardTransactionChainMessages.CreateTransactionChainReply) readFromFile(ShardTransactionChainMessages.CreateTransactionChainReply.PARSER);
-
- Assert.assertEquals(replyNew.getTransactionChainPath(),testTransactionChainPath);
-
- // the following will compare with the version we had shipped
- ShardTransactionChainMessages.CreateTransactionChainReply replyOriginal =
- (ShardTransactionChainMessages.CreateTransactionChainReply) readFromTestDataFile(ShardTransactionChainMessages.CreateTransactionChainReply.PARSER);
-
-
- Assert.assertEquals(replyOriginal.getTransactionChainPath(),
- replyNew.getTransactionChainPath());
-
}
@Override
static final Short OUTER_LIST_ID = (short) 10;
- static final YangInstanceIdentifier OUTER_LIST_PATH_LEGACY = YangInstanceIdentifier.builder(TEST_QNAME)
+ static final YangInstanceIdentifier OUTER_LIST_PATH_LEGACY = YangInstanceIdentifier.builder(TEST_PATH)
.nodeWithKey(OUTER_LIST_QNAME, ID_QNAME, OUTER_LIST_ID).build();
static final YangInstanceIdentifier LEAF_TWO_PATH_LEGACY = YangInstanceIdentifier.builder(OUTER_LIST_PATH_LEGACY)
.withNodeIdentifier(new NodeIdentifier(TEST_QNAME)).withChild(testAnyXmlNode).build();
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
import akka.actor.Props;
import akka.japi.Creator;
-
import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.cluster.datastore.messages.DataChangedReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
public class DataChangeListener extends AbstractUntypedActor {
private final AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> listener;
- private volatile boolean notificationsEnabled = false;
+ private boolean notificationsEnabled = false;
public DataChangeListener(AsyncDataChangeListener<YangInstanceIdentifier,
NormalizedNode<?, ?>> listener) {
change = reply.getChange();
this.listener.onDataChanged(change);
- if(getSender() != null){
+ // It seems the sender is never null but it doesn't hurt to check. If the caller passes in
+ // a null sender (ActorRef.noSender()), akka translates that to the deadLetters actor.
+ if(getSender() != null && !getContext().system().deadLetters().equals(getSender())) {
getSender().tell(new DataChangedReply(), getSelf());
}
}
package org.opendaylight.controller.cluster.datastore;
+import akka.actor.ActorRef;
import akka.actor.ActorSelection;
-
import com.google.common.base.Preconditions;
-
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
@Override public void onDataChanged(
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- dataChangeListenerActor.tell(new DataChanged(schemaContext, change), null);
+ dataChangeListenerActor.tell(new DataChanged(schemaContext, change), ActorRef.noSender());
}
}
import org.opendaylight.controller.cluster.raft.ConfigParams;
import org.opendaylight.controller.cluster.raft.DefaultConfigParamsImpl;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
-
import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
-
import java.util.concurrent.TimeUnit;
/**
private final int operationTimeoutInSeconds;
private final String dataStoreMXBeanType;
private final ConfigParams shardRaftConfig;
+ private final int shardTransactionCommitTimeoutInSeconds;
+ private final int shardTransactionCommitQueueCapacity;
- public DatastoreContext() {
- this("DistributedDatastore", null, Duration.create(10, TimeUnit.MINUTES), 5, 1000, 20000, 500);
- }
-
- public DatastoreContext(String dataStoreMXBeanType,
- InMemoryDOMDataStoreConfigProperties dataStoreProperties,
- Duration shardTransactionIdleTimeout,
- int operationTimeoutInSeconds,
- int shardJournalRecoveryLogBatchSize,
- int shardSnapshotBatchCount,
- int shardHeartbeatIntervalInMillis) {
- this.dataStoreMXBeanType = dataStoreMXBeanType;
+ private DatastoreContext(InMemoryDOMDataStoreConfigProperties dataStoreProperties,
+ ConfigParams shardRaftConfig, String dataStoreMXBeanType, int operationTimeoutInSeconds,
+ Duration shardTransactionIdleTimeout, int shardTransactionCommitTimeoutInSeconds,
+ int shardTransactionCommitQueueCapacity) {
this.dataStoreProperties = dataStoreProperties;
- this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ this.shardRaftConfig = shardRaftConfig;
+ this.dataStoreMXBeanType = dataStoreMXBeanType;
this.operationTimeoutInSeconds = operationTimeoutInSeconds;
+ this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ this.shardTransactionCommitTimeoutInSeconds = shardTransactionCommitTimeoutInSeconds;
+ this.shardTransactionCommitQueueCapacity = shardTransactionCommitQueueCapacity;
+ }
- DefaultConfigParamsImpl raftConfig = new DefaultConfigParamsImpl();
- raftConfig.setHeartBeatInterval(new FiniteDuration(shardHeartbeatIntervalInMillis,
- TimeUnit.MILLISECONDS));
- raftConfig.setJournalRecoveryLogBatchSize(shardJournalRecoveryLogBatchSize);
- raftConfig.setSnapshotBatchCount(shardSnapshotBatchCount);
- shardRaftConfig = raftConfig;
+ public static Builder newBuilder() {
+ return new Builder();
}
public InMemoryDOMDataStoreConfigProperties getDataStoreProperties() {
public ConfigParams getShardRaftConfig() {
return shardRaftConfig;
}
+
+ public int getShardTransactionCommitTimeoutInSeconds() {
+ return shardTransactionCommitTimeoutInSeconds;
+ }
+
+ public int getShardTransactionCommitQueueCapacity() {
+ return shardTransactionCommitQueueCapacity;
+ }
+
+ public static class Builder {
+ private InMemoryDOMDataStoreConfigProperties dataStoreProperties;
+ private Duration shardTransactionIdleTimeout = Duration.create(10, TimeUnit.MINUTES);
+ private int operationTimeoutInSeconds = 5;
+ private String dataStoreMXBeanType;
+ private int shardTransactionCommitTimeoutInSeconds = 30;
+ private int shardJournalRecoveryLogBatchSize = 1000;
+ private int shardSnapshotBatchCount = 20000;
+ private int shardHeartbeatIntervalInMillis = 500;
+ private int shardTransactionCommitQueueCapacity = 20000;
+
+ public Builder shardTransactionIdleTimeout(Duration shardTransactionIdleTimeout) {
+ this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ return this;
+ }
+
+ public Builder operationTimeoutInSeconds(int operationTimeoutInSeconds) {
+ this.operationTimeoutInSeconds = operationTimeoutInSeconds;
+ return this;
+ }
+
+ public Builder dataStoreMXBeanType(String dataStoreMXBeanType) {
+ this.dataStoreMXBeanType = dataStoreMXBeanType;
+ return this;
+ }
+
+ public Builder dataStoreProperties(InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
+ this.dataStoreProperties = dataStoreProperties;
+ return this;
+ }
+
+ public Builder shardTransactionCommitTimeoutInSeconds(int shardTransactionCommitTimeoutInSeconds) {
+ this.shardTransactionCommitTimeoutInSeconds = shardTransactionCommitTimeoutInSeconds;
+ return this;
+ }
+
+ public Builder shardJournalRecoveryLogBatchSize(int shardJournalRecoveryLogBatchSize) {
+ this.shardJournalRecoveryLogBatchSize = shardJournalRecoveryLogBatchSize;
+ return this;
+ }
+
+ public Builder shardSnapshotBatchCount(int shardSnapshotBatchCount) {
+ this.shardSnapshotBatchCount = shardSnapshotBatchCount;
+ return this;
+ }
+
+ public Builder shardHeartbeatIntervalInMillis(int shardHeartbeatIntervalInMillis) {
+ this.shardHeartbeatIntervalInMillis = shardHeartbeatIntervalInMillis;
+ return this;
+ }
+
+ public Builder shardTransactionCommitQueueCapacity(int shardTransactionCommitQueueCapacity) {
+ this.shardTransactionCommitQueueCapacity = shardTransactionCommitQueueCapacity;
+ return this;
+ }
+
+ public DatastoreContext build() {
+ DefaultConfigParamsImpl raftConfig = new DefaultConfigParamsImpl();
+ raftConfig.setHeartBeatInterval(new FiniteDuration(shardHeartbeatIntervalInMillis,
+ TimeUnit.MILLISECONDS));
+ raftConfig.setJournalRecoveryLogBatchSize(shardJournalRecoveryLogBatchSize);
+ raftConfig.setSnapshotBatchCount(shardSnapshotBatchCount);
+
+ return new DatastoreContext(dataStoreProperties, raftConfig, dataStoreMXBeanType,
+ operationTimeoutInSeconds, shardTransactionIdleTimeout,
+ shardTransactionCommitTimeoutInSeconds, shardTransactionCommitQueueCapacity);
+ }
+ }
}
import akka.actor.ActorSystem;
import akka.dispatch.OnComplete;
import akka.util.Timeout;
+import com.google.common.base.Optional;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
Preconditions.checkNotNull(path, "path should not be null");
Preconditions.checkNotNull(listener, "listener should not be null");
- if(LOG.isDebugEnabled()) {
- LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
- }
- ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
- DataChangeListener.props(listener ));
+
+ LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
- Future future = actorContext.executeLocalShardOperationAsync(shardName,
- new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
- new Timeout(actorContext.getOperationDuration().$times(
- REGISTER_DATA_CHANGE_LISTENER_TIMEOUT_FACTOR)));
+ Optional<ActorRef> shard = actorContext.findLocalShard(shardName);
+
+ //if shard is NOT local
+ if (!shard.isPresent()) {
+ LOG.debug("No local shard for shardName {} was found so returning a noop registration", shardName);
+ return new NoOpDataChangeListenerRegistration(listener);
+ }
+ //if shard is local
+ ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(DataChangeListener.props(listener));
+ Future future = actorContext.executeOperationAsync(shard.get(),
+ new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+ new Timeout(actorContext.getOperationDuration().$times(REGISTER_DATA_CHANGE_LISTENER_TIMEOUT_FACTOR)));
- if (future != null) {
- final DataChangeListenerRegistrationProxy listenerRegistrationProxy =
+ final DataChangeListenerRegistrationProxy listenerRegistrationProxy =
new DataChangeListenerRegistrationProxy(listener, dataChangeListenerActor);
- future.onComplete(new OnComplete(){
+ future.onComplete(new OnComplete() {
- @Override public void onComplete(Throwable failure, Object result)
+ @Override
+ public void onComplete(Throwable failure, Object result)
throws Throwable {
- if(failure != null){
- LOG.error("Failed to register listener at path " + path.toString(), failure);
- return;
- }
- RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
- listenerRegistrationProxy.setListenerRegistrationActor(actorContext
- .actorSelection(reply.getListenerRegistrationPath()));
+ if (failure != null) {
+ LOG.error("Failed to register listener at path " + path.toString(), failure);
+ return;
}
- }, actorContext.getActorSystem().dispatcher());
- return listenerRegistrationProxy;
- }
- if(LOG.isDebugEnabled()) {
- LOG.debug(
- "No local shard for shardName {} was found so returning a noop registration",
- shardName);
- }
- return new NoOpDataChangeListenerRegistration(listener);
+ RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
+ listenerRegistrationProxy.setListenerRegistrationActor(actorContext
+ .actorSelection(reply.getListenerRegistrationPath()));
+ }
+ }, actorContext.getActorSystem().dispatcher());
+
+ return listenerRegistrationProxy;
+
}
@Override
public void close() throws Exception {
actorContext.shutdown();
}
+
+ @VisibleForTesting
+ ActorContext getActorContext() {
+ return actorContext;
+ }
}
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
+import akka.actor.Cancellable;
import akka.actor.PoisonPill;
import akka.actor.Props;
import akka.event.Logging;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.protobuf.InvalidProtocolBufferException;
import org.opendaylight.controller.cluster.common.actor.CommonConfig;
import org.opendaylight.controller.cluster.common.actor.MeteringBehavior;
+import org.opendaylight.controller.cluster.datastore.ShardCommitCoordinator.CohortEntry;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardTransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardMBeanFactory;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionChain;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
import org.opendaylight.controller.cluster.raft.protobuff.client.messages.CompositeModificationPayload;
import org.opendaylight.controller.cluster.raft.protobuff.client.messages.Payload;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionFactory;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import scala.concurrent.duration.Duration;
+import scala.concurrent.duration.FiniteDuration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import javax.annotation.Nonnull;
/**
* A Shard represents a portion of the logical data tree <br/>
*/
public class Shard extends RaftActor {
+ private static final Object COMMIT_TRANSACTION_REPLY = new CommitTransactionReply().toSerializable();
+
+ private static final Object TX_COMMIT_TIMEOUT_CHECK_MESSAGE = "txCommitTimeoutCheck";
+
public static final String DEFAULT_NAME = "default";
// The state of this Shard
private final InMemoryDOMDataStore store;
- private final Map<Object, DOMStoreThreePhaseCommitCohort>
- modificationToCohort = new HashMap<>();
-
private final LoggingAdapter LOG =
Logging.getLogger(getContext().system(), this);
private ActorRef createSnapshotTransaction;
+ private int createSnapshotTransactionCounter;
+
+ private final ShardCommitCoordinator commitCoordinator;
+
+ private final long transactionCommitTimeout;
+
+ private Cancellable txCommitTimeoutCheckSchedule;
+
/**
* Coordinates persistence recovery on startup.
*/
if (isMetricsCaptureEnabled()) {
getContext().become(new MeteringBehavior(this));
}
+
+ commitCoordinator = new ShardCommitCoordinator(TimeUnit.SECONDS.convert(1, TimeUnit.MINUTES),
+ datastoreContext.getShardTransactionCommitQueueCapacity());
+
+ transactionCommitTimeout = TimeUnit.MILLISECONDS.convert(
+ datastoreContext.getShardTransactionCommitTimeoutInSeconds(), TimeUnit.SECONDS);
}
private static Map<String, String> mapPeerAddresses(
return Props.create(new ShardCreator(name, peerAddresses, datastoreContext, schemaContext));
}
- @Override public void onReceiveRecover(Object message) {
+ @Override
+ public void postStop() {
+ super.postStop();
+
+ if(txCommitTimeoutCheckSchedule != null) {
+ txCommitTimeoutCheckSchedule.cancel();
+ }
+ }
+
+ @Override
+ public void onReceiveRecover(Object message) {
if(LOG.isDebugEnabled()) {
LOG.debug("onReceiveRecover: Received message {} from {}",
message.getClass().toString(),
}
}
- @Override public void onReceiveCommand(Object message) {
+ @Override
+ public void onReceiveCommand(Object message) {
if(LOG.isDebugEnabled()) {
- LOG.debug("onReceiveCommand: Received message {} from {}",
- message.getClass().toString(),
- getSender());
+ LOG.debug("onReceiveCommand: Received message {} from {}", message, getSender());
}
if(message.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
- // This must be for install snapshot. Don't want to open this up and trigger
- // deSerialization
- self()
- .tell(new CaptureSnapshotReply(ReadDataReply.getNormalizedNodeByteString(message)),
- self());
-
- createSnapshotTransaction = null;
- // Send a PoisonPill instead of sending close transaction because we do not really need
- // a response
- getSender().tell(PoisonPill.getInstance(), self());
-
+ handleReadDataReply(message);
+ } else if (message.getClass().equals(CreateTransaction.SERIALIZABLE_CLASS)) {
+ handleCreateTransaction(message);
+ } else if(message instanceof ForwardedReadyTransaction) {
+ handleForwardedReadyTransaction((ForwardedReadyTransaction)message);
+ } else if(message.getClass().equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
+ handleCanCommitTransaction(CanCommitTransaction.fromSerializable(message));
+ } else if(message.getClass().equals(CommitTransaction.SERIALIZABLE_CLASS)) {
+ handleCommitTransaction(CommitTransaction.fromSerializable(message));
+ } else if(message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) {
+ handleAbortTransaction(AbortTransaction.fromSerializable(message));
} else if (message.getClass().equals(CloseTransactionChain.SERIALIZABLE_CLASS)){
closeTransactionChain(CloseTransactionChain.fromSerializable(message));
} else if (message instanceof RegisterChangeListener) {
registerChangeListener((RegisterChangeListener) message);
} else if (message instanceof UpdateSchemaContext) {
updateSchemaContext((UpdateSchemaContext) message);
- } else if (message instanceof ForwardedCommitTransaction) {
- handleForwardedCommit((ForwardedCommitTransaction) message);
- } else if (message.getClass()
- .equals(CreateTransaction.SERIALIZABLE_CLASS)) {
- if (isLeader()) {
- createTransaction(CreateTransaction.fromSerializable(message));
- } else if (getLeader() != null) {
- getLeader().forward(message, getContext());
- } else {
- getSender().tell(new akka.actor.Status.Failure(new IllegalStateException(
- "Could not find leader so transaction cannot be created")), getSelf());
- }
} else if (message instanceof PeerAddressResolved) {
PeerAddressResolved resolved = (PeerAddressResolved) message;
setPeerAddress(resolved.getPeerId().toString(),
resolved.getPeerAddress());
+ } else if(message.equals(TX_COMMIT_TIMEOUT_CHECK_MESSAGE)) {
+ handleTransactionCommitTimeoutCheck();
} else {
super.onReceiveCommand(message);
}
}
+ private void handleTransactionCommitTimeoutCheck() {
+ CohortEntry cohortEntry = commitCoordinator.getCurrentCohortEntry();
+ if(cohortEntry != null) {
+ long elapsed = System.currentTimeMillis() - cohortEntry.getLastAccessTime();
+ if(elapsed > transactionCommitTimeout) {
+ LOG.warning("Current transaction {} has timed out after {} ms - aborting",
+ cohortEntry.getTransactionID(), transactionCommitTimeout);
+
+ doAbortTransaction(cohortEntry.getTransactionID(), null);
+ }
+ }
+ }
+
+ private void handleCommitTransaction(CommitTransaction commit) {
+ final String transactionID = commit.getTransactionID();
+
+ LOG.debug("Committing transaction {}", transactionID);
+
+ // Get the current in-progress cohort entry in the commitCoordinator if it corresponds to
+ // this transaction.
+ final CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry == null) {
+ // We're not the current Tx - the Tx was likely expired b/c it took too long in
+ // between the canCommit and commit messages.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("Cannot commit transaction %s - it is not the current transaction",
+ transactionID));
+ LOG.error(ex.getMessage());
+ shardMBean.incrementFailedTransactionsCount();
+ getSender().tell(new akka.actor.Status.Failure(ex), getSelf());
+ return;
+ }
+
+ // We perform the preCommit phase here atomically with the commit phase. This is an
+ // optimization to eliminate the overhead of an extra preCommit message. We lose front-end
+ // coordination of preCommit across shards in case of failure but preCommit should not
+ // normally fail since we ensure only one concurrent 3-phase commit.
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ cohortEntry.getCohort().preCommit().get();
+
+ if(persistent) {
+ Shard.this.persistData(getSender(), transactionID,
+ new CompositeModificationPayload(cohortEntry.getModification().toSerializable()));
+ } else {
+ Shard.this.finishCommit(getSender(), transactionID);
+ }
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.error(e, "An exception occurred while preCommitting transaction {}",
+ cohortEntry.getTransactionID());
+ shardMBean.incrementFailedTransactionsCount();
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+
+ cohortEntry.updateLastAccessTime();
+ }
+
+ private void finishCommit(@Nonnull final ActorRef sender, final @Nonnull String transactionID) {
+ // With persistence enabled, this method is called via applyState by the leader strategy
+ // after the commit has been replicated to a majority of the followers.
+
+ CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry == null) {
+ // The transaction is no longer the current commit. This can happen if the transaction
+ // was aborted prior, most likely due to timeout in the front-end. We need to finish
+ // committing the transaction though since it was successfully persisted and replicated
+ // however we can't use the original cohort b/c it was already preCommitted and may
+ // conflict with the current commit or may have been aborted so we commit with a new
+ // transaction.
+ cohortEntry = commitCoordinator.getAndRemoveCohortEntry(transactionID);
+ if(cohortEntry != null) {
+ commitWithNewTransaction(cohortEntry.getModification());
+ sender.tell(COMMIT_TRANSACTION_REPLY, getSelf());
+ } else {
+ // This really shouldn't happen - it likely means that persistence or replication
+ // took so long to complete such that the cohort entry was expired from the cache.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("Could not finish committing transaction %s - no CohortEntry found",
+ transactionID));
+ LOG.error(ex.getMessage());
+ sender.tell(new akka.actor.Status.Failure(ex), getSelf());
+ }
+
+ return;
+ }
+
+ LOG.debug("Finishing commit for transaction {}", cohortEntry.getTransactionID());
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ cohortEntry.getCohort().commit().get();
+
+ sender.tell(COMMIT_TRANSACTION_REPLY, getSelf());
+
+ shardMBean.incrementCommittedTransactionCount();
+ shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
+
+ } catch (InterruptedException | ExecutionException e) {
+ sender.tell(new akka.actor.Status.Failure(e), getSelf());
+
+ LOG.error(e, "An exception occurred while committing transaction {}", transactionID);
+ shardMBean.incrementFailedTransactionsCount();
+ }
+
+ commitCoordinator.currentTransactionComplete(transactionID, true);
+ }
+
+ private void handleCanCommitTransaction(CanCommitTransaction canCommit) {
+ LOG.debug("Can committing transaction {}", canCommit.getTransactionID());
+ commitCoordinator.handleCanCommit(canCommit, getSender(), self());
+ }
+
+ private void handleForwardedReadyTransaction(ForwardedReadyTransaction ready) {
+ LOG.debug("Readying transaction {}", ready.getTransactionID());
+
+ // This message is forwarded by the ShardTransaction on ready. We cache the cohort in the
+ // commitCoordinator in preparation for the subsequent three phase commit initiated by
+ // the front-end.
+ commitCoordinator.transactionReady(ready.getTransactionID(), ready.getCohort(),
+ ready.getModification());
+
+ // Return our actor path as we'll handle the three phase commit.
+ getSender().tell(new ReadyTransactionReply(Serialization.serializedActorPath(self())).
+ toSerializable(), getSelf());
+ }
+
+ private void handleAbortTransaction(AbortTransaction abort) {
+ doAbortTransaction(abort.getTransactionID(), getSender());
+ }
+
+ private void doAbortTransaction(String transactionID, final ActorRef sender) {
+ final CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry != null) {
+ LOG.debug("Aborting transaction {}", transactionID);
+
+ // We don't remove the cached cohort entry here (ie pass false) in case the Tx was
+ // aborted during replication in which case we may still commit locally if replication
+ // succeeds.
+ commitCoordinator.currentTransactionComplete(transactionID, false);
+
+ final ListenableFuture<Void> future = cohortEntry.getCohort().abort();
+ final ActorRef self = getSelf();
+
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(Void v) {
+ shardMBean.incrementAbortTransactionsCount();
+
+ if(sender != null) {
+ sender.tell(new AbortTransactionReply().toSerializable(), self);
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during abort");
+
+ if(sender != null) {
+ sender.tell(new akka.actor.Status.Failure(t), self);
+ }
+ }
+ });
+ }
+ }
+
+ private void handleCreateTransaction(Object message) {
+ if (isLeader()) {
+ createTransaction(CreateTransaction.fromSerializable(message));
+ } else if (getLeader() != null) {
+ getLeader().forward(message, getContext());
+ } else {
+ getSender().tell(new akka.actor.Status.Failure(new IllegalStateException(
+ "Could not find shard leader so transaction cannot be created. This typically happens" +
+ " when system is coming up or recovering and a leader is being elected. Try again" +
+ " later.")), getSelf());
+ }
+ }
+
+ private void handleReadDataReply(Object message) {
+ // This must be for install snapshot. Don't want to open this up and trigger
+ // deSerialization
+
+ self().tell(new CaptureSnapshotReply(ReadDataReply.getNormalizedNodeByteString(message)),
+ self());
+
+ createSnapshotTransaction = null;
+
+ // Send a PoisonPill instead of sending close transaction because we do not really need
+ // a response
+ getSender().tell(PoisonPill.getInstance(), self());
+ }
+
private void closeTransactionChain(CloseTransactionChain closeTransactionChain) {
DOMStoreTransactionChain chain =
transactionChains.remove(closeTransactionChain.getTransactionChainId());
throw new NullPointerException("schemaContext should not be null");
}
- if (transactionType
- == TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
+ if (transactionType == TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
shardMBean.incrementReadOnlyTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newReadOnlyTransaction(), getSelf(),
- schemaContext,datastoreContext, shardMBean), transactionId.toString());
+ schemaContext,datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
- } else if (transactionType
- == TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
+ } else if (transactionType == TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
shardMBean.incrementReadWriteTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newReadWriteTransaction(), getSelf(),
- schemaContext, datastoreContext, shardMBean), transactionId.toString());
+ schemaContext, datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
- } else if (transactionType
- == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
+ } else if (transactionType == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
shardMBean.incrementWriteOnlyTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newWriteOnlyTransaction(), getSelf(),
- schemaContext, datastoreContext, shardMBean), transactionId.toString());
+ schemaContext, datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
} else {
throw new IllegalArgumentException(
"Shard="+name + ":CreateTransaction message has unidentified transaction type="
commitCohort.commit().get();
}
-
- private void commit(final ActorRef sender, Object serialized) {
- Modification modification = MutableCompositeModification
- .fromSerializable(serialized, schemaContext);
- DOMStoreThreePhaseCommitCohort cohort =
- modificationToCohort.remove(serialized);
- if (cohort == null) {
- // If there's no cached cohort then we must be applying replicated state.
- commitWithNewTransaction(serialized);
- return;
- }
-
- if(sender == null) {
- LOG.error("Commit failed. Sender cannot be null");
- return;
- }
-
- ListenableFuture<Void> future = cohort.commit();
-
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- sender.tell(new CommitTransactionReply().toSerializable(), getSelf());
- shardMBean.incrementCommittedTransactionCount();
- shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during commit");
- shardMBean.incrementFailedTransactionsCount();
- sender.tell(new akka.actor.Status.Failure(t), getSelf());
- }
- });
-
- }
-
- private void commitWithNewTransaction(Object modification) {
+ private void commitWithNewTransaction(Modification modification) {
DOMStoreWriteTransaction tx = store.newWriteOnlyTransaction();
- MutableCompositeModification.fromSerializable(modification, schemaContext).apply(tx);
+ modification.apply(tx);
try {
syncCommitTransaction(tx);
shardMBean.incrementCommittedTransactionCount();
+ shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
} catch (InterruptedException | ExecutionException e) {
shardMBean.incrementFailedTransactionsCount();
LOG.error(e, "Failed to commit");
}
}
- private void handleForwardedCommit(ForwardedCommitTransaction message) {
- Object serializedModification =
- message.getModification().toSerializable();
-
- modificationToCohort
- .put(serializedModification, message.getCohort());
-
- if (persistent) {
- this.persistData(getSender(), "identifier",
- new CompositeModificationPayload(serializedModification));
- } else {
- this.commit(getSender(), serializedModification);
- }
- }
-
private void updateSchemaContext(UpdateSchemaContext message) {
this.schemaContext = message.getSchemaContext();
updateSchemaContext(message.getSchemaContext());
recoveryCoordinator = null;
currentLogRecoveryBatch = null;
updateJournalStats();
+
+ //notify shard manager
+ getContext().parent().tell(new ActorInitialized(), getSelf());
+
+ // Schedule a message to be periodically sent to check if the current in-progress
+ // transaction should be expired and aborted.
+ FiniteDuration period = Duration.create(transactionCommitTimeout / 3, TimeUnit.MILLISECONDS);
+ txCommitTimeoutCheckSchedule = getContext().system().scheduler().schedule(
+ period, period, getSelf(),
+ TX_COMMIT_TIMEOUT_CHECK_MESSAGE, getContext().dispatcher(), ActorRef.noSender());
}
@Override
if (data instanceof CompositeModificationPayload) {
Object modification = ((CompositeModificationPayload) data).getModification();
- if (modification != null) {
- commit(clientActor, modification);
- } else {
+ if(modification == null) {
LOG.error(
- "modification is null - this is very unexpected, clientActor = {}, identifier = {}",
- identifier, clientActor != null ? clientActor.path().toString() : null);
+ "modification is null - this is very unexpected, clientActor = {}, identifier = {}",
+ identifier, clientActor != null ? clientActor.path().toString() : null);
+ } else if(clientActor == null) {
+ // There's no clientActor to which to send a commit reply so we must be applying
+ // replicated state from the leader.
+ commitWithNewTransaction(MutableCompositeModification.fromSerializable(
+ modification, schemaContext));
+ } else {
+ // This must be the OK to commit after replication consensus.
+ finishCommit(clientActor, identifier);
}
-
} else {
LOG.error("Unknown state received {} Class loader = {} CompositeNodeMod.ClassLoader = {}",
data, data.getClass().getClassLoader(),
// so that this actor does not get block building the snapshot
createSnapshotTransaction = createTransaction(
TransactionProxy.TransactionType.READ_ONLY.ordinal(),
- "createSnapshot", "");
+ "createSnapshot" + ++createSnapshotTransactionCounter, "");
createSnapshotTransaction.tell(
new ReadData(YangInstanceIdentifier.builder().build()).toSerializable(), self());
}
@VisibleForTesting
- NormalizedNode<?,?> readStore(YangInstanceIdentifier id)
- throws ExecutionException, InterruptedException {
- DOMStoreReadTransaction transaction = store.newReadOnlyTransaction();
-
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> future =
- transaction.read(id);
-
- Optional<NormalizedNode<?, ?>> optional = future.get();
- NormalizedNode<?, ?> node = optional.isPresent()? optional.get() : null;
-
- transaction.close();
-
- return node;
- }
-
- @VisibleForTesting
- void writeToStore(YangInstanceIdentifier id, NormalizedNode<?,?> node)
- throws ExecutionException, InterruptedException {
- DOMStoreWriteTransaction transaction = store.newWriteOnlyTransaction();
-
- transaction.write(id, node);
-
- syncCommitTransaction(transaction);
+ InMemoryDOMDataStore getDataStore() {
+ return store;
}
@VisibleForTesting
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import akka.actor.ActorRef;
+import akka.actor.Status;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+
+/**
+ * Coordinates commits for a shard ensuring only one concurrent 3-phase commit.
+ *
+ * @author Thomas Pantelis
+ */
+public class ShardCommitCoordinator {
+
+ private static final Logger LOG = LoggerFactory.getLogger(ShardCommitCoordinator.class);
+
+ private static final Object CAN_COMMIT_REPLY_TRUE =
+ new CanCommitTransactionReply(Boolean.TRUE).toSerializable();
+
+ private static final Object CAN_COMMIT_REPLY_FALSE =
+ new CanCommitTransactionReply(Boolean.FALSE).toSerializable();
+
+ private final Cache<String, CohortEntry> cohortCache;
+
+ private CohortEntry currentCohortEntry;
+
+ private final Queue<CohortEntry> queuedCohortEntries;
+
+ private final int queueCapacity;
+
+ public ShardCommitCoordinator(long cacheExpiryTimeoutInSec, int queueCapacity) {
+ cohortCache = CacheBuilder.newBuilder().expireAfterAccess(
+ cacheExpiryTimeoutInSec, TimeUnit.SECONDS).build();
+
+ this.queueCapacity = queueCapacity;
+
+ // We use a LinkedList here to avoid synchronization overhead with concurrent queue impls
+ // since this should only be accessed on the shard's dispatcher.
+ queuedCohortEntries = new LinkedList<>();
+ }
+
+ /**
+ * This method caches a cohort entry for the given transactions ID in preparation for the
+ * subsequent 3-phase commit.
+ *
+ * @param transactionID the ID of the transaction
+ * @param cohort the cohort to participate in the transaction commit
+ * @param modification the modification made by the transaction
+ */
+ public void transactionReady(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+
+ cohortCache.put(transactionID, new CohortEntry(transactionID, cohort, modification));
+ }
+
+ /**
+ * This method handles the canCommit phase for a transaction.
+ *
+ * @param canCommit the CanCommitTransaction message
+ * @param sender the actor that sent the message
+ * @param shard the transaction's shard actor
+ */
+ public void handleCanCommit(CanCommitTransaction canCommit, final ActorRef sender,
+ final ActorRef shard) {
+ String transactionID = canCommit.getTransactionID();
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("Processing canCommit for transaction {} for shard {}",
+ transactionID, shard.path());
+ }
+
+ // Lookup the cohort entry that was cached previously (or should have been) by
+ // transactionReady (via the ForwardedReadyTransaction message).
+ final CohortEntry cohortEntry = cohortCache.getIfPresent(transactionID);
+ if(cohortEntry == null) {
+ // Either canCommit was invoked before ready(shouldn't happen) or a long time passed
+ // between canCommit and ready and the entry was expired from the cache.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("No cohort entry found for transaction %s", transactionID));
+ LOG.error(ex.getMessage());
+ sender.tell(new Status.Failure(ex), shard);
+ return;
+ }
+
+ cohortEntry.setCanCommitSender(sender);
+ cohortEntry.setShard(shard);
+
+ if(currentCohortEntry != null) {
+ // There's already a Tx commit in progress - attempt to queue this entry to be
+ // committed after the current Tx completes.
+ LOG.debug("Transaction {} is already in progress - queueing transaction {}",
+ currentCohortEntry.getTransactionID(), transactionID);
+
+ if(queuedCohortEntries.size() < queueCapacity) {
+ queuedCohortEntries.offer(cohortEntry);
+ } else {
+ removeCohortEntry(transactionID);
+
+ RuntimeException ex = new RuntimeException(
+ String.format("Could not enqueue transaction %s - the maximum commit queue"+
+ " capacity %d has been reached.",
+ transactionID, queueCapacity));
+ LOG.error(ex.getMessage());
+ sender.tell(new Status.Failure(ex), shard);
+ }
+ } else {
+ // No Tx commit currently in progress - make this the current entry and proceed with
+ // canCommit.
+ cohortEntry.updateLastAccessTime();
+ currentCohortEntry = cohortEntry;
+
+ doCanCommit(cohortEntry);
+ }
+ }
+
+ private void doCanCommit(final CohortEntry cohortEntry) {
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ Boolean canCommit = cohortEntry.getCohort().canCommit().get();
+
+ cohortEntry.getCanCommitSender().tell(
+ canCommit ? CAN_COMMIT_REPLY_TRUE : CAN_COMMIT_REPLY_FALSE, cohortEntry.getShard());
+
+ if(!canCommit) {
+ // Remove the entry from the cache now since the Tx will be aborted.
+ removeCohortEntry(cohortEntry.getTransactionID());
+ }
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.debug("An exception occurred during canCommit", e);
+
+ // Remove the entry from the cache now since the Tx will be aborted.
+ removeCohortEntry(cohortEntry.getTransactionID());
+ cohortEntry.getCanCommitSender().tell(new Status.Failure(e), cohortEntry.getShard());
+ }
+ }
+
+ /**
+ * Returns the cohort entry for the Tx commit currently in progress if the given transaction ID
+ * matches the current entry.
+ *
+ * @param transactionID the ID of the transaction
+ * @return the current CohortEntry or null if the given transaction ID does not match the
+ * current entry.
+ */
+ public CohortEntry getCohortEntryIfCurrent(String transactionID) {
+ if(isCurrentTransaction(transactionID)) {
+ return currentCohortEntry;
+ }
+
+ return null;
+ }
+
+ public CohortEntry getCurrentCohortEntry() {
+ return currentCohortEntry;
+ }
+
+ public CohortEntry getAndRemoveCohortEntry(String transactionID) {
+ CohortEntry cohortEntry = cohortCache.getIfPresent(transactionID);
+ cohortCache.invalidate(transactionID);
+ return cohortEntry;
+ }
+
+ public void removeCohortEntry(String transactionID) {
+ cohortCache.invalidate(transactionID);
+ }
+
+ public boolean isCurrentTransaction(String transactionID) {
+ return currentCohortEntry != null &&
+ currentCohortEntry.getTransactionID().equals(transactionID);
+ }
+
+ /**
+ * This method is called when a transaction is complete, successful or not. If the given
+ * given transaction ID matches the current in-progress transaction, the next cohort entry,
+ * if any, is dequeued and processed.
+ *
+ * @param transactionID the ID of the completed transaction
+ * @param removeCohortEntry if true the CohortEntry for the transaction is also removed from
+ * the cache.
+ */
+ public void currentTransactionComplete(String transactionID, boolean removeCohortEntry) {
+ if(removeCohortEntry) {
+ removeCohortEntry(transactionID);
+ }
+
+ if(isCurrentTransaction(transactionID)) {
+ // Dequeue the next cohort entry waiting in the queue.
+ currentCohortEntry = queuedCohortEntries.poll();
+ if(currentCohortEntry != null) {
+ doCanCommit(currentCohortEntry);
+ }
+ }
+ }
+
+ static class CohortEntry {
+ private final String transactionID;
+ private final DOMStoreThreePhaseCommitCohort cohort;
+ private final Modification modification;
+ private ActorRef canCommitSender;
+ private ActorRef shard;
+ private long lastAccessTime;
+
+ CohortEntry(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+ this.transactionID = transactionID;
+ this.cohort = cohort;
+ this.modification = modification;
+ }
+
+ void updateLastAccessTime() {
+ lastAccessTime = System.currentTimeMillis();
+ }
+
+ long getLastAccessTime() {
+ return lastAccessTime;
+ }
+
+ String getTransactionID() {
+ return transactionID;
+ }
+
+ DOMStoreThreePhaseCommitCohort getCohort() {
+ return cohort;
+ }
+
+ Modification getModification() {
+ return modification;
+ }
+
+ ActorRef getCanCommitSender() {
+ return canCommitSender;
+ }
+
+ void setCanCommitSender(ActorRef canCommitSender) {
+ this.canCommitSender = canCommitSender;
+ }
+
+ ActorRef getShard() {
+ return shard;
+ }
+
+ void setShard(ActorRef shard) {
+ this.shard = shard;
+ }
+ }
+}
import akka.persistence.RecoveryFailure;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import org.opendaylight.controller.cluster.common.actor.AbstractUntypedPersistentActor;
+import org.opendaylight.controller.cluster.common.actor.AbstractUntypedPersistentActorWithMetering;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfo;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfoMBean;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
+import org.opendaylight.controller.cluster.datastore.messages.ActorNotInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
* <li> Monitor the cluster members and store their addresses
* <ul>
*/
-public class ShardManager extends AbstractUntypedPersistentActor {
+public class ShardManager extends AbstractUntypedPersistentActorWithMetering {
protected final LoggingAdapter LOG =
Logging.getLogger(getContext().system(), this);
findLocalShard((FindLocalShard) message);
} else if (message instanceof UpdateSchemaContext) {
updateSchemaContext(message);
+ } else if(message instanceof ActorInitialized) {
+ onActorInitialized(message);
} else if (message instanceof ClusterEvent.MemberUp){
memberUp((ClusterEvent.MemberUp) message);
} else if(message instanceof ClusterEvent.MemberRemoved) {
}
+ private void onActorInitialized(Object message) {
+ final ActorRef sender = getSender();
+
+ if (sender == null) {
+ return; //why is a non-actor sending this message? Just ignore.
+ }
+
+ String actorName = sender.path().name();
+ //find shard name from actor name; actor name is stringified shardId
+ ShardIdentifier shardId = ShardIdentifier.builder().fromShardIdString(actorName).build();
+
+ if (shardId.getShardName() == null) {
+ return;
+ }
+ markShardAsInitialized(shardId.getShardName());
+ }
+
+ @VisibleForTesting protected void markShardAsInitialized(String shardName) {
+ LOG.debug("Initializing shard [{}]", shardName);
+ ShardInformation shardInformation = localShards.get(shardName);
+ if (shardInformation != null) {
+ shardInformation.setShardInitialized(true);
+ }
+ }
+
@Override protected void handleRecover(Object message) throws Exception {
if(message instanceof SchemaContextModules){
}
private void findLocalShard(FindLocalShard message) {
- ShardInformation shardInformation =
- localShards.get(message.getShardName());
+ ShardInformation shardInformation = localShards.get(message.getShardName());
- if(shardInformation != null){
- getSender().tell(new LocalShardFound(shardInformation.getActor()), getSelf());
+ if(shardInformation == null){
+ getSender().tell(new LocalShardNotFound(message.getShardName()), getSelf());
return;
}
- getSender().tell(new LocalShardNotFound(message.getShardName()),
- getSelf());
+ sendResponse(shardInformation, new LocalShardFound(shardInformation.getActor()));
+ }
+
+ private void sendResponse(ShardInformation shardInformation, Object message) {
+ if (!shardInformation.isShardInitialized()) {
+ getSender().tell(new ActorNotInitialized(), getSelf());
+ return;
+ }
+
+ getSender().tell(message, getSelf());
}
private void memberRemoved(ClusterEvent.MemberRemoved message) {
private void memberUp(ClusterEvent.MemberUp message) {
String memberName = message.member().roles().head();
- memberNameToAddress.put(memberName , message.member().address());
+ memberNameToAddress.put(memberName, message.member().address());
for(ShardInformation info : localShards.values()){
String shardName = info.getShardName();
}
private void findPrimary(FindPrimary message) {
+ final ActorRef sender = getSender();
String shardName = message.getShardName();
// First see if the there is a local replica for the shard
ShardInformation info = localShards.get(shardName);
- if(info != null) {
+ if (info != null) {
ActorPath shardPath = info.getActorPath();
- if (shardPath != null) {
- getSender()
- .tell(
- new PrimaryFound(shardPath.toString()).toSerializable(),
- getSelf());
- return;
- }
+ sendResponse(info, new PrimaryFound(shardPath.toString()).toSerializable());
+ return;
}
- List<String> members =
- configuration.getMembersFromShardName(shardName);
+ List<String> members = configuration.getMembersFromShardName(shardName);
if(cluster.getCurrentMemberName() != null) {
members.remove(cluster.getCurrentMemberName());
}
+ /**
+ * FIXME: Instead of sending remote shard actor path back to sender,
+ * forward FindPrimary message to remote shard manager
+ */
// There is no way for us to figure out the primary (for now) so assume
// that one of the remote nodes is a primary
for(String memberName : members) {
private final ActorRef actor;
private final ActorPath actorPath;
private final Map<ShardIdentifier, String> peerAddresses;
+ private boolean shardInitialized = false; //flag that determines if the actor is ready for business
private ShardInformation(String shardName, ActorRef actor,
Map<ShardIdentifier, String> peerAddresses) {
}
}
+
+ public boolean isShardInitialized() {
+ return shardInitialized;
+ }
+
+ public void setShardInitialized(boolean shardInitialized) {
+ this.shardInitialized = shardInitialized;
+ }
}
private static class ShardManagerCreator implements Creator<ShardManager> {
private final DOMStoreReadTransaction transaction;
public ShardReadTransaction(DOMStoreReadTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.DataExists;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
-import org.opendaylight.controller.cluster.datastore.messages.MergeData;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
/**
* @author: syedbahm
* Date: 8/6/14
*/
-public class ShardReadWriteTransaction extends ShardTransaction {
+public class ShardReadWriteTransaction extends ShardWriteTransaction {
private final DOMStoreReadWriteTransaction transaction;
public ShardReadWriteTransaction(DOMStoreReadWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(transaction, shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
public void handleReceive(Object message) throws Exception {
if(ReadData.SERIALIZABLE_CLASS.equals(message.getClass())) {
readData(transaction, ReadData.fromSerializable(message));
- } else if(WriteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- writeData(transaction, WriteData.fromSerializable(message, schemaContext));
- } else if(MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
- } else if(DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction, DeleteData.fromSerializable(message));
- } else if(ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
- readyTransaction(transaction, new ReadyTransaction());
} else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
dataExists(transaction, DataExists.fromSerializable(message));
} else {
super.handleReceive(message);
}
}
-
- @Override
- protected DOMStoreTransaction getDOMStoreTransaction() {
- return transaction;
- }
}
import akka.actor.Props;
import akka.actor.ReceiveTimeout;
import akka.japi.Creator;
-
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
-
import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.DataExists;
import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply;
-import org.opendaylight.controller.cluster.datastore.messages.MergeData;
-import org.opendaylight.controller.cluster.datastore.messages.MergeDataReply;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
-import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.DeleteModification;
-import org.opendaylight.controller.cluster.datastore.modification.ImmutableCompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
-import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
public abstract class ShardTransaction extends AbstractUntypedActor {
private final ActorRef shardActor;
- protected final SchemaContext schemaContext;
+ private final SchemaContext schemaContext;
private final ShardStats shardStats;
-
- private final MutableCompositeModification modification = new MutableCompositeModification();
+ private final String transactionID;
protected ShardTransaction(ActorRef shardActor, SchemaContext schemaContext,
- ShardStats shardStats) {
+ ShardStats shardStats, String transactionID) {
this.shardActor = shardActor;
this.schemaContext = schemaContext;
this.shardStats = shardStats;
+ this.transactionID = transactionID;
}
public static Props props(DOMStoreTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext,DatastoreContext datastoreContext, ShardStats shardStats) {
+ SchemaContext schemaContext,DatastoreContext datastoreContext, ShardStats shardStats,
+ String transactionID) {
return Props.create(new ShardTransactionCreator(transaction, shardActor, schemaContext,
- datastoreContext, shardStats));
+ datastoreContext, shardStats, transactionID));
}
protected abstract DOMStoreTransaction getDOMStoreTransaction();
+ protected ActorRef getShardActor() {
+ return shardActor;
+ }
+
+ protected String getTransactionID() {
+ return transactionID;
+ }
+
+ protected SchemaContext getSchemaContext() {
+ return schemaContext;
+ }
+
@Override
public void handleReceive(Object message) throws Exception {
if (message.getClass().equals(CloseTransaction.SERIALIZABLE_CLASS)) {
closeTransaction(true);
- } else if (message instanceof GetCompositedModification) {
- // This is here for testing only
- getSender().tell(new GetCompositeModificationReply(
- new ImmutableCompositeModification(modification)), getSelf());
} else if (message instanceof ReceiveTimeout) {
if(LOG.isDebugEnabled()) {
LOG.debug("Got ReceiveTimeout for inactivity - closing Tx");
}
- protected void writeData(DOMStoreWriteTransaction transaction, WriteData message) {
- modification.addModification(
- new WriteModification(message.getPath(), message.getData(),schemaContext));
- if(LOG.isDebugEnabled()) {
- LOG.debug("writeData at path : " + message.getPath().toString());
- }
- try {
- transaction.write(message.getPath(), message.getData());
- getSender().tell(new WriteDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void mergeData(DOMStoreWriteTransaction transaction, MergeData message) {
- modification.addModification(
- new MergeModification(message.getPath(), message.getData(), schemaContext));
- if(LOG.isDebugEnabled()) {
- LOG.debug("mergeData at path : " + message.getPath().toString());
- }
- try {
- transaction.merge(message.getPath(), message.getData());
- getSender().tell(new MergeDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void deleteData(DOMStoreWriteTransaction transaction, DeleteData message) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("deleteData at path : " + message.getPath().toString());
- }
- modification.addModification(new DeleteModification(message.getPath()));
- try {
- transaction.delete(message.getPath());
- getSender().tell(new DeleteDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void readyTransaction(DOMStoreWriteTransaction transaction, ReadyTransaction message) {
- DOMStoreThreePhaseCommitCohort cohort = transaction.ready();
- ActorRef cohortActor = getContext().actorOf(
- ThreePhaseCommitCohort.props(cohort, shardActor, modification, shardStats), "cohort");
- getSender()
- .tell(new ReadyTransactionReply(cohortActor.path()).toSerializable(), getSelf());
-
- }
-
private static class ShardTransactionCreator implements Creator<ShardTransaction> {
private static final long serialVersionUID = 1L;
final SchemaContext schemaContext;
final DatastoreContext datastoreContext;
final ShardStats shardStats;
+ final String transactionID;
ShardTransactionCreator(DOMStoreTransaction transaction, ActorRef shardActor,
SchemaContext schemaContext, DatastoreContext datastoreContext,
- ShardStats shardStats) {
+ ShardStats shardStats, String transactionID) {
this.transaction = transaction;
this.shardActor = shardActor;
this.shardStats = shardStats;
this.schemaContext = schemaContext;
this.datastoreContext = datastoreContext;
+ this.transactionID = transactionID;
}
@Override
ShardTransaction tx;
if(transaction instanceof DOMStoreReadWriteTransaction) {
tx = new ShardReadWriteTransaction((DOMStoreReadWriteTransaction)transaction,
- shardActor, schemaContext, shardStats);
+ shardActor, schemaContext, shardStats, transactionID);
} else if(transaction instanceof DOMStoreReadTransaction) {
tx = new ShardReadTransaction((DOMStoreReadTransaction)transaction, shardActor,
- schemaContext, shardStats);
+ schemaContext, shardStats, transactionID);
} else {
tx = new ShardWriteTransaction((DOMStoreWriteTransaction)transaction,
- shardActor, schemaContext, shardStats);
+ shardActor, schemaContext, shardStats, transactionID);
}
tx.getContext().setReceiveTimeout(datastoreContext.getShardTransactionIdleTimeout());
return tx;
}
}
-
- // These classes are in here for test purposes only
-
- static class GetCompositedModification {
- }
-
-
- static class GetCompositeModificationReply {
- private final CompositeModification modification;
-
-
- GetCompositeModificationReply(CompositeModification modification) {
- this.modification = modification;
- }
-
-
- public CompositeModification getModification() {
- return modification;
- }
- }
}
return getContext().parent();
}
- private ActorRef createTypedTransactionActor(CreateTransaction createTransaction,
- String transactionId) {
+ private ActorRef createTypedTransactionActor(CreateTransaction createTransaction) {
+ String transactionName = "shard-" + createTransaction.getTransactionId();
if(createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newReadOnlyTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newReadWriteTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newWriteOnlyTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else {
throw new IllegalArgumentException (
"CreateTransaction message has unidentified transaction type=" +
private void createTransaction(CreateTransaction createTransaction) {
- ActorRef transactionActor = createTypedTransactionActor(createTransaction, "shard-" + createTransaction.getTransactionId());
- getSender()
- .tell(new CreateTransactionReply(transactionActor.path().toString(),createTransaction.getTransactionId()).toSerializable(),
- getSelf());
+ ActorRef transactionActor = createTypedTransactionActor(createTransaction);
+ getSender().tell(new CreateTransactionReply(transactionActor.path().toString(),
+ createTransaction.getTransactionId()).toSerializable(), getSelf());
}
public static Props props(DOMStoreTransactionChain chain, SchemaContext schemaContext,
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
-
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
+import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.MergeData;
+import org.opendaylight.controller.cluster.datastore.messages.MergeDataReply;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
+import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.DeleteModification;
+import org.opendaylight.controller.cluster.datastore.modification.ImmutableCompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
+import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
* Date: 8/6/14
*/
public class ShardWriteTransaction extends ShardTransaction {
+
+ private final MutableCompositeModification modification = new MutableCompositeModification();
private final DOMStoreWriteTransaction transaction;
public ShardWriteTransaction(DOMStoreWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
+ @Override
+ protected DOMStoreTransaction getDOMStoreTransaction() {
+ return transaction;
+ }
+
@Override
public void handleReceive(Object message) throws Exception {
if(WriteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- writeData(transaction, WriteData.fromSerializable(message, schemaContext));
+ writeData(transaction, WriteData.fromSerializable(message, getSchemaContext()));
} else if(MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
+ mergeData(transaction, MergeData.fromSerializable(message, getSchemaContext()));
} else if(DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
deleteData(transaction, DeleteData.fromSerializable(message));
} else if(ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction, new ReadyTransaction());
+ } else if (message instanceof GetCompositedModification) {
+ // This is here for testing only
+ getSender().tell(new GetCompositeModificationReply(
+ new ImmutableCompositeModification(modification)), getSelf());
} else {
super.handleReceive(message);
}
}
- @Override
- protected DOMStoreTransaction getDOMStoreTransaction() {
- return transaction;
+ private void writeData(DOMStoreWriteTransaction transaction, WriteData message) {
+ modification.addModification(
+ new WriteModification(message.getPath(), message.getData(), getSchemaContext()));
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("writeData at path : " + message.getPath().toString());
+ }
+ try {
+ transaction.write(message.getPath(), message.getData());
+ getSender().tell(new WriteDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void mergeData(DOMStoreWriteTransaction transaction, MergeData message) {
+ modification.addModification(
+ new MergeModification(message.getPath(), message.getData(), getSchemaContext()));
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("mergeData at path : " + message.getPath().toString());
+ }
+ try {
+ transaction.merge(message.getPath(), message.getData());
+ getSender().tell(new MergeDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void deleteData(DOMStoreWriteTransaction transaction, DeleteData message) {
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("deleteData at path : " + message.getPath().toString());
+ }
+ modification.addModification(new DeleteModification(message.getPath()));
+ try {
+ transaction.delete(message.getPath());
+ getSender().tell(new DeleteDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void readyTransaction(DOMStoreWriteTransaction transaction, ReadyTransaction message) {
+ DOMStoreThreePhaseCommitCohort cohort = transaction.ready();
+
+ getShardActor().forward(new ForwardedReadyTransaction(getTransactionID(), cohort, modification),
+ getContext());
+ }
+
+ // These classes are in here for test purposes only
+
+ static class GetCompositedModification {
+ }
+
+ static class GetCompositeModificationReply {
+ private final CompositeModification modification;
+
+
+ GetCompositeModificationReply(CompositeModification modification) {
+ this.modification = modification;
+ }
+
+ public CompositeModification getModification() {
+ return modification;
+ }
}
}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import akka.event.Logging;
-import akka.event.LoggingAdapter;
-import akka.japi.Creator;
-
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
-import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
-import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-
-public class ThreePhaseCommitCohort extends AbstractUntypedActor {
- private final DOMStoreThreePhaseCommitCohort cohort;
- private final ActorRef shardActor;
- private final CompositeModification modification;
- private final ShardStats shardStats;
-
- public ThreePhaseCommitCohort(DOMStoreThreePhaseCommitCohort cohort,
- ActorRef shardActor, CompositeModification modification, ShardStats shardStats) {
-
- this.cohort = cohort;
- this.shardActor = shardActor;
- this.modification = modification;
- this.shardStats = shardStats;
- }
-
- private final LoggingAdapter log =
- Logging.getLogger(getContext().system(), this);
-
- public static Props props(final DOMStoreThreePhaseCommitCohort cohort,
- final ActorRef shardActor, final CompositeModification modification,
- ShardStats shardStats) {
- return Props.create(new ThreePhaseCommitCohortCreator(cohort, shardActor, modification,
- shardStats));
- }
-
- @Override
- public void handleReceive(Object message) throws Exception {
- if (message.getClass()
- .equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
- canCommit(new CanCommitTransaction());
- } else if (message.getClass()
- .equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
- preCommit(new PreCommitTransaction());
- } else if (message.getClass()
- .equals(CommitTransaction.SERIALIZABLE_CLASS)) {
- commit(new CommitTransaction());
- } else if (message.getClass()
- .equals(AbortTransaction.SERIALIZABLE_CLASS)) {
- abort(new AbortTransaction());
- } else {
- unknownMessage(message);
- }
- }
-
- private void abort(AbortTransaction message) {
- final ListenableFuture<Void> future = cohort.abort();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
-
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- shardStats.incrementAbortTransactionsCount();
- sender
- .tell(new AbortTransactionReply().toSerializable(),
- self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during abort");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
- }
-
- private void commit(CommitTransaction message) {
- // Forward the commit to the shard
- if(log.isDebugEnabled()) {
- log.debug("Forward commit transaction to Shard {} ", shardActor);
- }
- shardActor.forward(new ForwardedCommitTransaction(cohort, modification),
- getContext());
-
- getContext().parent().tell(PoisonPill.getInstance(), getSelf());
-
- }
-
- private void preCommit(PreCommitTransaction message) {
- final ListenableFuture<Void> future = cohort.preCommit();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- sender
- .tell(new PreCommitTransactionReply().toSerializable(),
- self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during pre-commit");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
-
- }
-
- private void canCommit(CanCommitTransaction message) {
- final ListenableFuture<Boolean> future = cohort.canCommit();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
- Futures.addCallback(future, new FutureCallback<Boolean>() {
- @Override
- public void onSuccess(Boolean canCommit) {
- sender.tell(new CanCommitTransactionReply(canCommit)
- .toSerializable(), self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during canCommit");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
- }
-
- private static class ThreePhaseCommitCohortCreator implements Creator<ThreePhaseCommitCohort> {
- final DOMStoreThreePhaseCommitCohort cohort;
- final ActorRef shardActor;
- final CompositeModification modification;
- final ShardStats shardStats;
-
- ThreePhaseCommitCohortCreator(DOMStoreThreePhaseCommitCohort cohort,
- ActorRef shardActor, CompositeModification modification, ShardStats shardStats) {
- this.cohort = cohort;
- this.shardActor = shardActor;
- this.modification = modification;
- this.shardStats = shardStats;
- }
-
- @Override
- public ThreePhaseCommitCohort create() throws Exception {
- return new ThreePhaseCommitCohort(cohort, shardActor, modification, shardStats);
- }
- }
-}
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
import akka.actor.ActorSelection;
import akka.dispatch.Futures;
import akka.dispatch.OnComplete;
-
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
-
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import scala.concurrent.Future;
import scala.runtime.AbstractFunction1;
-
import java.util.Collections;
import java.util.List;
private static final Logger LOG = LoggerFactory.getLogger(ThreePhaseCommitCohortProxy.class);
+ private static final ListenableFuture<Void> IMMEDIATE_SUCCESS =
+ com.google.common.util.concurrent.Futures.immediateFuture(null);
+
private final ActorContext actorContext;
- private final List<Future<ActorPath>> cohortPathFutures;
- private volatile List<ActorPath> cohortPaths;
+ private final List<Future<ActorSelection>> cohortFutures;
+ private volatile List<ActorSelection> cohorts;
private final String transactionId;
public ThreePhaseCommitCohortProxy(ActorContext actorContext,
- List<Future<ActorPath>> cohortPathFutures, String transactionId) {
+ List<Future<ActorSelection>> cohortFutures, String transactionId) {
this.actorContext = actorContext;
- this.cohortPathFutures = cohortPathFutures;
+ this.cohortFutures = cohortFutures;
this.transactionId = transactionId;
}
- private Future<Void> buildCohortPathsList() {
+ private Future<Void> buildCohortList() {
- Future<Iterable<ActorPath>> combinedFutures = Futures.sequence(cohortPathFutures,
+ Future<Iterable<ActorSelection>> combinedFutures = Futures.sequence(cohortFutures,
actorContext.getActorSystem().dispatcher());
- return combinedFutures.transform(new AbstractFunction1<Iterable<ActorPath>, Void>() {
+ return combinedFutures.transform(new AbstractFunction1<Iterable<ActorSelection>, Void>() {
@Override
- public Void apply(Iterable<ActorPath> paths) {
- cohortPaths = Lists.newArrayList(paths);
+ public Void apply(Iterable<ActorSelection> actorSelections) {
+ cohorts = Lists.newArrayList(actorSelections);
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} successfully built cohort path list: {}",
- transactionId, cohortPaths);
+ transactionId, cohorts);
}
return null;
}
// extracted from ReadyTransactionReply messages by the Futures that were obtained earlier
// and passed to us from upstream processing. If any one fails then we'll fail canCommit.
- buildCohortPathsList().onComplete(new OnComplete<Void>() {
+ buildCohortList().onComplete(new OnComplete<Void>() {
@Override
public void onComplete(Throwable failure, Void notUsed) throws Throwable {
if(failure != null) {
if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {}: a cohort path Future failed: {}", transactionId, failure);
+ LOG.debug("Tx {}: a cohort Future failed: {}", transactionId, failure);
}
returnFuture.setException(failure);
} else {
// their canCommit processing. If any one fails then we'll fail canCommit.
Future<Iterable<Object>> combinedFuture =
- invokeCohorts(new CanCommitTransaction().toSerializable());
+ invokeCohorts(new CanCommitTransaction(transactionId).toSerializable());
combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
@Override
}
private Future<Iterable<Object>> invokeCohorts(Object message) {
- List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
- for(ActorPath actorPath : cohortPaths) {
+ List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohorts.size());
+ for(ActorSelection cohort : cohorts) {
if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {}: Sending {} to cohort {}", transactionId, message, actorPath);
+ LOG.debug("Tx {}: Sending {} to cohort {}", transactionId, message, cohort);
}
- ActorSelection cohort = actorContext.actorSelection(actorPath);
- futureList.add(actorContext.executeRemoteOperationAsync(cohort, message));
+ futureList.add(actorContext.executeOperationAsync(cohort, message));
}
return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
@Override
public ListenableFuture<Void> preCommit() {
- return voidOperation("preCommit", new PreCommitTransaction().toSerializable(),
- PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
+ // We don't need to do anything here - preCommit is done atomically with the commit phase
+ // by the shard.
+ return IMMEDIATE_SUCCESS;
}
@Override
// exception then that exception will supersede and suppress the original exception. But
// it's the original exception that is the root cause and of more interest to the client.
- return voidOperation("abort", new AbortTransaction().toSerializable(),
+ return voidOperation("abort", new AbortTransaction(transactionId).toSerializable(),
AbortTransactionReply.SERIALIZABLE_CLASS, false);
}
@Override
public ListenableFuture<Void> commit() {
- return voidOperation("commit", new CommitTransaction().toSerializable(),
+ return voidOperation("commit", new CommitTransaction(transactionId).toSerializable(),
CommitTransactionReply.SERIALIZABLE_CLASS, true);
}
// The cohort actor list should already be built at this point by the canCommit phase but,
// if not for some reason, we'll try to build it here.
- if(cohortPaths != null) {
+ if(cohorts != null) {
finishVoidOperation(operationName, message, expectedResponseClass, propagateException,
returnFuture);
} else {
- buildCohortPathsList().onComplete(new OnComplete<Void>() {
+ buildCohortList().onComplete(new OnComplete<Void>() {
@Override
public void onComplete(Throwable failure, Void notUsed) throws Throwable {
if(failure != null) {
}
@VisibleForTesting
- List<Future<ActorPath>> getCohortPathFutures() {
- return Collections.unmodifiableList(cohortPathFutures);
+ List<Future<ActorSelection>> getCohortFutures() {
+ return Collections.unmodifiableList(cohortFutures);
}
}
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
+import akka.actor.ActorSelection;
import akka.dispatch.Futures;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionChain;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
public class TransactionChainProxy implements DOMStoreTransactionChain{
private final ActorContext actorContext;
private final String transactionChainId;
- private volatile List<Future<ActorPath>> cohortPathFutures = Collections.emptyList();
+ private volatile List<Future<ActorSelection>> cohortFutures = Collections.emptyList();
public TransactionChainProxy(ActorContext actorContext) {
this.actorContext = actorContext;
return transactionChainId;
}
- public void onTransactionReady(List<Future<ActorPath>> cohortPathFutures){
- this.cohortPathFutures = cohortPathFutures;
+ public void onTransactionReady(List<Future<ActorSelection>> cohortFutures){
+ this.cohortFutures = cohortFutures;
}
public void waitTillCurrentTransactionReady(){
try {
Await.result(Futures
- .sequence(this.cohortPathFutures, actorContext.getActorSystem().dispatcher()),
+ .sequence(this.cohortFutures, actorContext.getActorSystem().dispatcher()),
actorContext.getOperationDuration());
} catch (Exception e) {
throw new IllegalStateException("Failed when waiting for transaction on a chain to become ready", e);
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
import akka.actor.ActorSelection;
import akka.dispatch.OnComplete;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.SettableFuture;
+import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
if(remoteTransactionActorsMB.get()) {
for(ActorSelection actor : remoteTransactionActors) {
LOG.trace("Sending CloseTransaction to {}", actor);
- actorContext.sendRemoteOperationAsync(actor,
+ actorContext.sendOperationAsync(actor,
new CloseTransaction().toSerializable());
}
}
LOG.debug("Tx {} Trying to get {} transactions ready for commit", identifier,
remoteTransactionPaths.size());
}
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
LOG.debug("Tx {} Readying transaction for shard {}", identifier,
transactionContext.getShardName());
}
- cohortPathFutures.add(transactionContext.readyTransaction());
+ cohortFutures.add(transactionContext.readyTransaction());
}
if(transactionChainProxy != null){
- transactionChainProxy.onTransactionReady(cohortPathFutures);
+ transactionChainProxy.onTransactionReady(cohortFutures);
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures,
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures,
identifier.toString());
}
}
try {
- Object response = actorContext.executeShardOperation(shardName,
- new CreateTransaction(identifier.toString(), this.transactionType.ordinal(),
- getTransactionChainId()).toSerializable());
+ Optional<ActorSelection> primaryShard = actorContext.findPrimaryShard(shardName);
+ if (!primaryShard.isPresent()) {
+ throw new PrimaryNotFoundException("Primary could not be found for shard " + shardName);
+ }
+
+ Object response = actorContext.executeOperation(primaryShard.get(),
+ new CreateTransaction(identifier.toString(), this.transactionType.ordinal(),
+ getTransactionChainId()).toSerializable());
if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
CreateTransactionReply reply =
CreateTransactionReply.fromSerializable(response);
void closeTransaction();
- Future<ActorPath> readyTransaction();
+ Future<ActorSelection> readyTransaction();
void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data);
return actor;
}
- private String getResolvedCohortPath(String cohortPath) {
- return actorContext.resolvePath(actorPath, cohortPath);
- }
-
@Override
public void closeTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} closeTransaction called", identifier);
}
- actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
+ actorContext.sendOperationAsync(getActor(), new CloseTransaction().toSerializable());
}
@Override
- public Future<ActorPath> readyTransaction() {
+ public Future<ActorSelection> readyTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction called with {} previous recorded operations pending",
identifier, recordedOperationFutures.size());
}
// Send the ReadyTransaction message to the Tx actor.
- final Future<Object> replyFuture = actorContext.executeRemoteOperationAsync(getActor(),
+ final Future<Object> replyFuture = actorContext.executeOperationAsync(getActor(),
new ReadyTransaction().toSerializable());
// Combine all the previously recorded put/merge/delete operation reply Futures and the
// Transform the combined Future into a Future that returns the cohort actor path from
// the ReadyTransactionReply. That's the end result of the ready operation.
- return combinedFutures.transform(new AbstractFunction1<Iterable<Object>, ActorPath>() {
+ return combinedFutures.transform(new AbstractFunction1<Iterable<Object>, ActorSelection>() {
@Override
- public ActorPath apply(Iterable<Object> notUsed) {
+ public ActorSelection apply(Iterable<Object> notUsed) {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction: pending recorded operations succeeded",
identifier);
if(serializedReadyReply.getClass().equals(
ReadyTransactionReply.SERIALIZABLE_CLASS)) {
ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
- actorContext.getActorSystem(), serializedReadyReply);
+ serializedReadyReply);
- String resolvedCohortPath = getResolvedCohortPath(
- reply.getCohortPath().toString());
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {} readyTransaction: resolved cohort path {}",
- identifier, resolvedCohortPath);
- }
- return actorContext.actorFor(resolvedCohortPath);
+ return actorContext.actorSelection(reply.getCohortPath());
} else {
// Throwing an exception here will fail the Future.
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} deleteData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
- new DeleteData(path).toSerializable() ));
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
+ new DeleteData(path).toSerializable()));
}
@Override
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} mergeData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
new MergeData(path, data, schemaContext).toSerializable()));
}
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} writeData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
new WriteData(path, data, schemaContext).toSerializable()));
}
}
};
- Future<Object> readFuture = actorContext.executeRemoteOperationAsync(getActor(),
+ Future<Object> readFuture = actorContext.executeOperationAsync(getActor(),
new ReadData(path).toSerializable());
readFuture.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
}
}
};
- Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ Future<Object> future = actorContext.executeOperationAsync(getActor(),
new DataExists(path).toSerializable());
future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
}
}
@Override
- public Future<ActorPath> readyTransaction() {
+ public Future<ActorSelection> readyTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction called", identifier);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.exceptions;
+
+public class NotInitializedException extends RuntimeException {
+ public NotInitializedException(String message) {
+ super(message);
+ }
+}
import com.google.common.base.Preconditions;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
public class ShardIdentifier {
private final String shardName;
private final String memberName;
private final String type;
+ //format and pattern should be in sync
+ private final String format = "%s-shard-%s-%s";
+ private static final Pattern pattern = Pattern.compile("(\\S+)-shard-(\\S+)-(\\S+)");
public ShardIdentifier(String shardName, String memberName, String type) {
}
@Override public String toString() {
- StringBuilder builder = new StringBuilder();
- builder.append(memberName).append("-shard-").append(shardName).append("-").append(type);
- return builder.toString();
+ //ensure the output of toString matches the pattern above
+ return new StringBuilder(memberName)
+ .append("-shard-")
+ .append(shardName)
+ .append("-")
+ .append(type)
+ .toString();
}
public static Builder builder(){
return new Builder();
}
+ public String getShardName() {
+ return shardName;
+ }
+
+ public String getMemberName() {
+ return memberName;
+ }
+
+ public String getType() {
+ return type;
+ }
+
public static class Builder {
private String shardName;
private String memberName;
return this;
}
+ public Builder fromShardIdString(String shardId){
+ Matcher matcher = pattern.matcher(shardId);
+
+ if (matcher.matches()) {
+ memberName = matcher.group(1);
+ shardName = matcher.group(2);
+ type = matcher.group(3);
+ }
+ return this;
+ }
}
}
public class ShardTransactionIdentifier {
private final String remoteTransactionId;
- public ShardTransactionIdentifier(String remoteTransactionId) {
- this.remoteTransactionId = Preconditions.checkNotNull(remoteTransactionId, "remoteTransactionId should not be null");
+ private ShardTransactionIdentifier(String remoteTransactionId) {
+ this.remoteTransactionId = Preconditions.checkNotNull(remoteTransactionId,
+ "remoteTransactionId should not be null");
}
public static Builder builder(){
return new Builder();
}
+ public String getRemoteTransactionId() {
+ return remoteTransactionId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) {
}
@Override public String toString() {
- final StringBuilder sb =
- new StringBuilder();
+ final StringBuilder sb = new StringBuilder();
sb.append("shard-").append(remoteTransactionId);
return sb.toString();
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class AbortTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransaction.class;
+ public static final Class<ThreePhaseCommitCohortMessages.AbortTransaction> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.AbortTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public AbortTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder().
+ setTransactionId(transactionID).build();
+ }
+
+ public static AbortTransaction fromSerializable(Object message) {
+ return new AbortTransaction(((ThreePhaseCommitCohortMessages.AbortTransaction)message).
+ getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class AbortTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransactionReply.class;
+ public static final Class<ThreePhaseCommitCohortMessages.AbortTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.AbortTransactionReply.class;
-
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.AbortTransactionReply.newBuilder().build();
- }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.AbortTransactionReply.newBuilder().build();
+ }
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import java.io.Serializable;
+
+public class ActorInitialized implements Serializable {
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import java.io.Serializable;
+
+public class ActorNotInitialized implements Serializable {
+}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CanCommitTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransaction.class;
+ public static final Class<?> SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public CanCommitTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().
+ setTransactionId(transactionID).build();
+ }
+
+ public static CanCommitTransaction fromSerializable(Object message) {
+ return new CanCommitTransaction(((ThreePhaseCommitCohortMessages.CanCommitTransaction)message).
+ getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CanCommitTransactionReply implements SerializableMessage {
- public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransactionReply.class;
- private final Boolean canCommit;
+ public static Class<ThreePhaseCommitCohortMessages.CanCommitTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CanCommitTransactionReply.class;
- public CanCommitTransactionReply(Boolean canCommit) {
- this.canCommit = canCommit;
- }
+ private final Boolean canCommit;
- public Boolean getCanCommit() {
- return canCommit;
- }
+ public CanCommitTransactionReply(Boolean canCommit) {
+ this.canCommit = canCommit;
+ }
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CanCommitTransactionReply.newBuilder().setCanCommit(canCommit).build();
- }
+ public Boolean getCanCommit() {
+ return canCommit;
+ }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CanCommitTransactionReply.newBuilder().setCanCommit(canCommit).build();
+ }
- public static CanCommitTransactionReply fromSerializable(Object message) {
- return new CanCommitTransactionReply(((ThreePhaseCommitCohortMessages.CanCommitTransactionReply)message).getCanCommit());
- }
+ public static CanCommitTransactionReply fromSerializable(Object message) {
+ return new CanCommitTransactionReply(
+ ((ThreePhaseCommitCohortMessages.CanCommitTransactionReply) message).getCanCommit());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CommitTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransaction.class;
+ public static final Class<ThreePhaseCommitCohortMessages.CommitTransaction> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CommitTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CommitTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public CommitTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CommitTransaction.newBuilder().setTransactionId(
+ transactionID).build();
+ }
+
+ public static CommitTransaction fromSerializable(Object message) {
+ return new CommitTransaction(((ThreePhaseCommitCohortMessages.
+ CommitTransaction)message).getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CommitTransactionReply implements SerializableMessage {
+ public static final Class<ThreePhaseCommitCohortMessages.CommitTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CommitTransactionReply.class;
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransactionReply.class;
-
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CommitTransactionReply.newBuilder().build();
- }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CommitTransactionReply.newBuilder().build();
+ }
}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
-
-public class CreateTransactionChain implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChain.class;
-
- @Override
- public Object toSerializable() {
- return ShardTransactionChainMessages.CreateTransactionChain.newBuilder().build();
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import akka.actor.ActorPath;
-import akka.actor.ActorSystem;
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
-
-public class CreateTransactionChainReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChainReply.class;
- private final ActorPath transactionChainPath;
-
- public CreateTransactionChainReply(ActorPath transactionChainPath) {
- this.transactionChainPath = transactionChainPath;
- }
-
- public ActorPath getTransactionChainPath() {
- return transactionChainPath;
- }
-
- @Override
- public ShardTransactionChainMessages.CreateTransactionChainReply toSerializable() {
- return ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder()
- .setTransactionChainPath(transactionChainPath.toString()).build();
- }
-
- public static CreateTransactionChainReply fromSerializable(ActorSystem actorSystem,Object serializable){
- ShardTransactionChainMessages.CreateTransactionChainReply o = (ShardTransactionChainMessages.CreateTransactionChainReply) serializable;
- return new CreateTransactionChainReply(
- actorSystem.actorFor(o.getTransactionChainPath()).path());
- }
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import org.opendaylight.controller.cluster.datastore.modification.Modification;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-
-public class ForwardedCommitTransaction {
- private final DOMStoreThreePhaseCommitCohort cohort;
- private final Modification modification;
-
- public ForwardedCommitTransaction(DOMStoreThreePhaseCommitCohort cohort, Modification modification){
- this.cohort = cohort;
- this.modification = modification;
- }
-
- public DOMStoreThreePhaseCommitCohort getCohort() {
- return cohort;
- }
-
- public Modification getModification() {
- return modification;
- }
-}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+
+/**
+ * Transaction ReadyTransaction message that is forwarded to the local Shard from the ShardTransaction.
+ *
+ * @author Thomas Pantelis
+ */
+public class ForwardedReadyTransaction {
+ private final String transactionID;
+ private final DOMStoreThreePhaseCommitCohort cohort;
+ private final Modification modification;
+
+ public ForwardedReadyTransaction(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+ this.transactionID = transactionID;
+ this.cohort = cohort;
+ this.modification = modification;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ public DOMStoreThreePhaseCommitCohort getCohort() {
+ return cohort;
+ }
+
+ public Modification getModification() {
+ return modification;
+ }
+}
package org.opendaylight.controller.cluster.datastore.messages;
-import akka.actor.ActorPath;
-import akka.actor.ActorSystem;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class ReadyTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.ReadyTransactionReply.class;
- private final ActorPath cohortPath;
+ public static final Class<ShardTransactionMessages.ReadyTransactionReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.ReadyTransactionReply.class;
- public ReadyTransactionReply(ActorPath cohortPath) {
+ private final String cohortPath;
- this.cohortPath = cohortPath;
- }
+ public ReadyTransactionReply(String cohortPath) {
- public ActorPath getCohortPath() {
- return cohortPath;
- }
+ this.cohortPath = cohortPath;
+ }
- @Override
- public ShardTransactionMessages.ReadyTransactionReply toSerializable() {
- return ShardTransactionMessages.ReadyTransactionReply.newBuilder()
- .setActorPath(cohortPath.toString()).build();
- }
+ public String getCohortPath() {
+ return cohortPath;
+ }
- public static ReadyTransactionReply fromSerializable(ActorSystem actorSystem,Object serializable){
- ShardTransactionMessages.ReadyTransactionReply o = (ShardTransactionMessages.ReadyTransactionReply) serializable;
- return new ReadyTransactionReply(
- actorSystem.actorFor(o.getActorPath()).path());
- }
+ @Override
+ public ShardTransactionMessages.ReadyTransactionReply toSerializable() {
+ return ShardTransactionMessages.ReadyTransactionReply.newBuilder().
+ setActorPath(cohortPath).build();
+ }
+
+ public static ReadyTransactionReply fromSerializable(Object serializable) {
+ ShardTransactionMessages.ReadyTransactionReply o =
+ (ShardTransactionMessages.ReadyTransactionReply) serializable;
+
+ return new ReadyTransactionReply(o.getActorPath());
+ }
}
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
-import akka.pattern.Patterns;
import akka.util.Timeout;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.Configuration;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
+import org.opendaylight.controller.cluster.datastore.exceptions.NotInitializedException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import org.opendaylight.controller.cluster.datastore.messages.ActorNotInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
}
/**
- * Finds the primary for a given shard
+ * Finds the primary shard for the given shard name
*
* @param shardName
* @return
*/
- public ActorSelection findPrimary(String shardName) {
- String path = findPrimaryPath(shardName);
- return actorSystem.actorSelection(path);
+ public Optional<ActorSelection> findPrimaryShard(String shardName) {
+ String path = findPrimaryPathOrNull(shardName);
+ if (path == null){
+ return Optional.absent();
+ }
+ return Optional.of(actorSystem.actorSelection(path));
}
/**
* @return a reference to a local shard actor which represents the shard
* specified by the shardName
*/
- public ActorRef findLocalShard(String shardName) {
- Object result = executeLocalOperation(shardManager,
- new FindLocalShard(shardName));
+ public Optional<ActorRef> findLocalShard(String shardName) {
+ Object result = executeOperation(shardManager, new FindLocalShard(shardName));
if (result instanceof LocalShardFound) {
LocalShardFound found = (LocalShardFound) result;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Local shard found {}", found.getPath());
- }
- return found.getPath();
+ LOG.debug("Local shard found {}", found.getPath());
+ return Optional.of(found.getPath());
}
- return null;
+ return Optional.absent();
}
- public String findPrimaryPath(String shardName) {
- Object result = executeLocalOperation(shardManager,
- new FindPrimary(shardName).toSerializable());
+ private String findPrimaryPathOrNull(String shardName) {
+ Object result = executeOperation(shardManager, new FindPrimary(shardName).toSerializable());
if (result.getClass().equals(PrimaryFound.SERIALIZABLE_CLASS)) {
PrimaryFound found = PrimaryFound.fromSerializable(result);
- if(LOG.isDebugEnabled()) {
- LOG.debug("Primary found {}", found.getPrimaryPath());
- }
+ LOG.debug("Primary found {}", found.getPrimaryPath());
return found.getPrimaryPath();
+
+ } else if (result.getClass().equals(ActorNotInitialized.class)){
+ throw new NotInitializedException(
+ String.format("Found primary shard[%s] but its not initialized yet. Please try again later", shardName)
+ );
+
+ } else {
+ return null;
}
- throw new PrimaryNotFoundException("Could not find primary for shardName " + shardName);
}
* @param message
* @return The response of the operation
*/
- public Object executeLocalOperation(ActorRef actor, Object message) {
- Future<Object> future = ask(actor, message, operationTimeout);
+ public Object executeOperation(ActorRef actor, Object message) {
+ Future<Object> future = executeOperationAsync(actor, message, operationTimeout);
try {
return Await.result(future, operationDuration);
} catch (Exception e) {
- throw new TimeoutException("Sending message " + message.getClass().toString() + " to actor " + actor.toString() + " failed" , e);
+ throw new TimeoutException("Sending message " + message.getClass().toString() +
+ " to actor " + actor.toString() + " failed. Try again later.", e);
}
}
+ public Future<Object> executeOperationAsync(ActorRef actor, Object message, Timeout timeout) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
+ return ask(actor, message, timeout);
+ }
+
/**
* Execute an operation on a remote actor and wait for it's response
*
* @param message
* @return
*/
- public Object executeRemoteOperation(ActorSelection actor, Object message) {
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Sending remote message {} to {}", message.getClass().toString(),
- actor.toString());
- }
- Future<Object> future = ask(actor, message, operationTimeout);
+ public Object executeOperation(ActorSelection actor, Object message) {
+ Future<Object> future = executeOperationAsync(actor, message);
try {
return Await.result(future, operationDuration);
} catch (Exception e) {
throw new TimeoutException("Sending message " + message.getClass().toString() +
- " to actor " + actor.toString() + " failed" , e);
+ " to actor " + actor.toString() + " failed. Try again later.", e);
}
}
* @param message the message to send
* @return a Future containing the eventual result
*/
- public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message) {
+ public Future<Object> executeOperationAsync(ActorSelection actor, Object message) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
- if(LOG.isDebugEnabled()) {
- LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
- }
return ask(actor, message, operationTimeout);
}
* @param actor the ActorSelection
* @param message the message to send
*/
- public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
- actor.tell(message, ActorRef.noSender());
- }
-
- public void sendShardOperationAsync(String shardName, Object message) {
- ActorSelection primary = findPrimary(shardName);
-
- primary.tell(message, ActorRef.noSender());
- }
-
-
- /**
- * Execute an operation on the primary for a given shard
- * <p>
- * This method first finds the primary for a given shard ,then sends
- * the message to the remote shard and waits for a response
- * </p>
- *
- * @param shardName
- * @param message
- * @return
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException if the message to the remote shard times out
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException if the primary shard is not found
- */
- public Object executeShardOperation(String shardName, Object message) {
- ActorSelection primary = findPrimary(shardName);
-
- return executeRemoteOperation(primary, message);
- }
-
- /**
- * Execute an operation on the the local shard only
- * <p>
- * This method first finds the address of the local shard if any. It then
- * executes the operation on it.
- * </p>
- *
- * @param shardName the name of the shard on which the operation needs to be executed
- * @param message the message that needs to be sent to the shard
- * @return the message that was returned by the local actor on which the
- * the operation was executed. If a local shard was not found then
- * null is returned
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException
- * if the operation does not complete in a specified time duration
- */
- public Object executeLocalShardOperation(String shardName, Object message) {
- ActorRef local = findLocalShard(shardName);
-
- if(local != null) {
- return executeLocalOperation(local, message);
- }
-
- return null;
- }
+ public void sendOperationAsync(ActorSelection actor, Object message) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
- /**
- * Execute an operation on the the local shard only asynchronously
- *
- * <p>
- * This method first finds the address of the local shard if any. It then
- * executes the operation on it.
- * </p>
- *
- * @param shardName the name of the shard on which the operation needs to be executed
- * @param message the message that needs to be sent to the shard
- * @param timeout the amount of time that this method should wait for a response before timing out
- * @return null if the shard could not be located else a future on which the caller can wait
- *
- */
- public Future executeLocalShardOperationAsync(String shardName, Object message, Timeout timeout) {
- ActorRef local = findLocalShard(shardName);
- if(local == null){
- return null;
- }
- return Patterns.ask(local, message, timeout);
+ actor.tell(message, ActorRef.noSender());
}
-
-
public void shutdown() {
shardManager.tell(PoisonPill.getInstance(), null);
actorSystem.shutdown();
}
- /**
- * @deprecated Need to stop using this method. There are ways to send a
- * remote ActorRef as a string which should be used instead of this hack
- *
- * @param primaryPath
- * @param localPathOfRemoteActor
- * @return
- */
- @Deprecated
- public String resolvePath(final String primaryPath,
- final String localPathOfRemoteActor) {
- StringBuilder builder = new StringBuilder();
- String[] primaryPathElements = primaryPath.split("/");
- builder.append(primaryPathElements[0]).append("//")
- .append(primaryPathElements[1]).append(primaryPathElements[2]);
- String[] remotePathElements = localPathOfRemoteActor.split("/");
- for (int i = 3; i < remotePathElements.length; i++) {
- builder.append("/").append(remotePathElements[i]);
- }
-
- return builder.toString();
-
- }
-
- public ActorPath actorFor(String path){
- return actorSystem.actorFor(path).path();
- }
-
public String getCurrentMemberName(){
return clusterWrapper.getCurrentMemberName();
}
*/
public void broadcast(Object message){
for(String shardName : configuration.getAllShardNames()){
- try {
- sendShardOperationAsync(shardName, message);
- } catch(Exception e){
- LOG.warn("broadcast failed to send message " + message.getClass().getSimpleName() + " to shard " + shardName, e);
+
+ Optional<ActorSelection> primary = findPrimaryShard(shardName);
+ if (primary.isPresent()) {
+ primary.get().tell(message, ActorRef.noSender());
+ } else {
+ LOG.warn("broadcast failed to send message {} to shard {}. Primary not found",
+ message.getClass().getSimpleName(), shardName);
}
}
}
props = new ConfigProperties();
}
- DatastoreContext datastoreContext = new DatastoreContext("DistributedConfigDatastore",
- InMemoryDOMDataStoreConfigProperties.create(
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder()
+ .dataStoreMXBeanType("DistributedConfigDatastore")
+ .dataStoreProperties(InMemoryDOMDataStoreConfigProperties.create(
props.getMaxShardDataChangeExecutorPoolSize().getValue().intValue(),
props.getMaxShardDataChangeExecutorQueueSize().getValue().intValue(),
props.getMaxShardDataChangeListenerQueueSize().getValue().intValue(),
- props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()),
- Duration.create(props.getShardTransactionIdleTimeoutInMinutes().getValue(),
- TimeUnit.MINUTES),
- props.getOperationTimeoutInSeconds().getValue(),
- props.getShardJournalRecoveryLogBatchSize().getValue().intValue(),
- props.getShardSnapshotBatchCount().getValue().intValue(),
- props.getShardHearbeatIntervalInMillis().getValue());
+ props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()))
+ .shardTransactionIdleTimeout(Duration.create(
+ props.getShardTransactionIdleTimeoutInMinutes().getValue(), TimeUnit.MINUTES))
+ .operationTimeoutInSeconds(props.getOperationTimeoutInSeconds().getValue())
+ .shardJournalRecoveryLogBatchSize(props.getShardJournalRecoveryLogBatchSize().
+ getValue().intValue())
+ .shardSnapshotBatchCount(props.getShardSnapshotBatchCount().getValue().intValue())
+ .shardHeartbeatIntervalInMillis(props.getShardHearbeatIntervalInMillis().getValue())
+ .shardTransactionCommitTimeoutInSeconds(
+ props.getShardTransactionCommitTimeoutInSeconds().getValue().intValue())
+ .shardTransactionCommitQueueCapacity(
+ props.getShardTransactionCommitQueueCapacity().getValue().intValue())
+ .build();
return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(),
datastoreContext, bundleContext);
props = new OperationalProperties();
}
- DatastoreContext datastoreContext = new DatastoreContext("DistributedOperationalDatastore",
- InMemoryDOMDataStoreConfigProperties.create(
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder()
+ .dataStoreMXBeanType("DistributedOperationalDatastore")
+ .dataStoreProperties(InMemoryDOMDataStoreConfigProperties.create(
props.getMaxShardDataChangeExecutorPoolSize().getValue().intValue(),
props.getMaxShardDataChangeExecutorQueueSize().getValue().intValue(),
props.getMaxShardDataChangeListenerQueueSize().getValue().intValue(),
- props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()),
- Duration.create(props.getShardTransactionIdleTimeoutInMinutes().getValue(),
- TimeUnit.MINUTES),
- props.getOperationTimeoutInSeconds().getValue(),
- props.getShardJournalRecoveryLogBatchSize().getValue().intValue(),
- props.getShardSnapshotBatchCount().getValue().intValue(),
- props.getShardHearbeatIntervalInMillis().getValue());
+ props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()))
+ .shardTransactionIdleTimeout(Duration.create(
+ props.getShardTransactionIdleTimeoutInMinutes().getValue(), TimeUnit.MINUTES))
+ .operationTimeoutInSeconds(props.getOperationTimeoutInSeconds().getValue())
+ .shardJournalRecoveryLogBatchSize(props.getShardJournalRecoveryLogBatchSize().
+ getValue().intValue())
+ .shardSnapshotBatchCount(props.getShardSnapshotBatchCount().getValue().intValue())
+ .shardHeartbeatIntervalInMillis(props.getShardHearbeatIntervalInMillis().getValue())
+ .shardTransactionCommitTimeoutInSeconds(
+ props.getShardTransactionCommitTimeoutInSeconds().getValue().intValue())
+ .shardTransactionCommitQueueCapacity(
+ props.getShardTransactionCommitQueueCapacity().getValue().intValue())
+ .build();
return DistributedDataStoreFactory.createInstance("operational",
getOperationalSchemaServiceDependency(), datastoreContext, bundleContext);
leaf max-shard-data-change-listener-queue-size {
default 1000;
type non-zero-uint32-type;
- description "The maximum queue size for each shard's data store data change listeners.";
+ description "The maximum queue size for each shard's data store data change listener.";
}
leaf max-shard-data-store-executor-queue-size {
description "The maximum number of journal log entries to batch on recovery for a shard before committing to the data store.";
}
+ leaf shard-transaction-commit-timeout-in-seconds {
+ default 30;
+ type non-zero-uint32-type;
+ description "The maximum amount of time a shard transaction three-phase commit can be idle without receiving the next messages before it aborts the transaction";
+ }
+
+ leaf shard-transaction-commit-queue-capacity {
+ default 20000;
+ type non-zero-uint32-type;
+ description "The maximum allowed capacity for each shard's transaction commit queue.";
+ }
+
leaf enable-metric-capture {
default false;
type boolean;
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorPath;
-import akka.actor.ActorRef;
-import akka.actor.ActorSelection;
-import akka.actor.Props;
-import akka.event.Logging;
-import akka.testkit.JavaTestKit;
-import org.junit.Test;
-import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
-import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
-import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
-import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
-import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.FiniteDuration;
-
-import java.util.Collections;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-
-public class BasicIntegrationTest extends AbstractActorTest {
-
- @Test
- public void integrationTest() throws Exception{
- // System.setProperty("shard.persistent", "true");
- // This test will
- // - create a Shard
- // - initiate a transaction
- // - write something
- // - read the transaction for commit
- // - commit the transaction
-
-
- new JavaTestKit(getSystem()) {{
- final ShardIdentifier identifier =
- ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config").build();
-
- final SchemaContext schemaContext = TestModel.createTestContext();
- DatastoreContext datastoreContext = new DatastoreContext();
-
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext());
- final ActorRef shard = getSystem().actorOf(props);
-
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- shard.tell(new UpdateSchemaContext(schemaContext), getRef());
-
-
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(shard.path().toString())
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
-
- assertEquals(true, result);
-
- // Create a transaction on the shard
- shard.tell(new CreateTransaction("txn-1", TransactionProxy.TransactionType.WRITE_ONLY.ordinal() ).toSerializable(), getRef());
-
- final ActorSelection transaction =
- new ExpectMsg<ActorSelection>(duration("3 seconds"), "CreateTransactionReply") {
- @Override
- protected ActorSelection match(Object in) {
- if (CreateTransactionReply.SERIALIZABLE_CLASS.equals(in.getClass())) {
- CreateTransactionReply reply = CreateTransactionReply.fromSerializable(in);
- return getSystem()
- .actorSelection(reply
- .getTransactionPath());
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertNotNull(transaction);
-
- System.out.println("Successfully created transaction");
-
- // 3. Write some data
- transaction.tell(new WriteData(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.TEST_QNAME), schemaContext).toSerializable(),
- getRef());
-
- Boolean writeDone = new ExpectMsg<Boolean>(duration("3 seconds"), "WriteDataReply") {
- @Override
- protected Boolean match(Object in) {
- if (in.getClass().equals(WriteDataReply.SERIALIZABLE_CLASS)) {
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(writeDone);
-
- System.out.println("Successfully wrote data");
-
- // 4. Ready the transaction for commit
-
- transaction.tell(new ReadyTransaction().toSerializable(), getRef());
-
- final ActorSelection cohort =
- new ExpectMsg<ActorSelection>(duration("3 seconds"), "ReadyTransactionReply") {
- @Override
- protected ActorSelection match(Object in) {
- if (in.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)) {
- ActorPath cohortPath =
- ReadyTransactionReply.fromSerializable(getSystem(),in)
- .getCohortPath();
- return getSystem()
- .actorSelection(cohortPath);
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertNotNull(cohort);
-
- System.out.println("Successfully readied the transaction");
-
- // 5. PreCommit the transaction
-
- cohort.tell(new PreCommitTransaction().toSerializable(), getRef());
-
- Boolean preCommitDone =
- new ExpectMsg<Boolean>(duration("3 seconds"), "PreCommitTransactionReply") {
- @Override
- protected Boolean match(Object in) {
- if (in.getClass().equals(PreCommitTransactionReply.SERIALIZABLE_CLASS)) {
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(preCommitDone);
-
- System.out.println("Successfully pre-committed the transaction");
-
- // 6. Commit the transaction
- cohort.tell(new CommitTransaction().toSerializable(), getRef());
-
- // FIXME : Add assertions that the commit worked and that the cohort and transaction actors were terminated
-
- System.out.println("TODO : Check Successfully committed the transaction");
- }
-
-
- };
- }
-
- private ActorRef watchActor(ActorSelection actor) {
- Future<ActorRef> future = actor
- .resolveOne(FiniteDuration.apply(100, "milliseconds"));
-
- try {
- ActorRef actorRef = Await.result(future,
- FiniteDuration.apply(100, "milliseconds"));
-
- watch(actorRef);
-
- return actorRef;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- }
- };
-
-
- }
-}
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)), new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
Assert.assertNotNull(messages);
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)),new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
assertNotNull(messages);
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)),new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
assertNotNull(messages);
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
+import akka.actor.DeadLetter;
import akka.actor.Props;
import akka.testkit.JavaTestKit;
import org.junit.Test;
+import org.mockito.Mockito;
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.cluster.datastore.messages.DataChangedReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
import org.opendaylight.controller.md.cluster.datastore.model.CompositeModel;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
public class DataChangeListenerTest extends AbstractActorTest {
- private static class MockDataChangedEvent implements AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> createdData = new HashMap<>();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> updatedData = new HashMap<>();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> originalData = new HashMap<>();
-
-
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getCreatedData() {
- createdData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return createdData;
- }
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getUpdatedData() {
- updatedData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return updatedData;
-
- }
-
- @Override
- public Set<YangInstanceIdentifier> getRemovedPaths() {
- Set<YangInstanceIdentifier>ids = new HashSet();
- ids.add( CompositeModel.TEST_PATH);
- return ids;
- }
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getOriginalData() {
- originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return originalData;
- }
-
- @Override public NormalizedNode<?, ?> getOriginalSubtree() {
-
-
- return originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- }
-
- @Override public NormalizedNode<?, ?> getUpdatedSubtree() {
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @Test
+ public void testDataChangedWhenNotificationsAreEnabled(){
+ new JavaTestKit(getSystem()) {{
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
+ final ActorRef subject = getSystem().actorOf(props, "testDataChangedNotificationsEnabled");
- //fixme: need to have some valid data here
- return originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- }
- }
+ // Let the DataChangeListener know that notifications should be enabled
+ subject.tell(new EnableNotification(true), getRef());
- private class MockDataChangeListener implements AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- private boolean gotIt = false;
- private AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change;
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ getRef());
- @Override public void onDataChanged(
- AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- gotIt = true;this.change=change;
- }
+ expectMsgClass(DataChangedReply.class);
- public boolean gotIt() {
- return gotIt;
- }
- public AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> getChange(){
- return change;
- }
+ Mockito.verify(mockListener).onDataChanged(mockChangeEvent);
+ }};
}
+ @SuppressWarnings({ "rawtypes", "unchecked" })
@Test
- public void testDataChangedWhenNotificationsAreEnabled(){
+ public void testDataChangedWhenNotificationsAreDisabled(){
new JavaTestKit(getSystem()) {{
- final MockDataChangeListener listener = new MockDataChangeListener();
- final Props props = DataChangeListener.props(listener);
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
final ActorRef subject =
- getSystem().actorOf(props, "testDataChangedNotificationsEnabled");
+ getSystem().actorOf(props, "testDataChangedNotificationsDisabled");
+
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ getRef());
new Within(duration("1 seconds")) {
@Override
protected void run() {
-
- // Let the DataChangeListener know that notifications should
- // be enabled
- subject.tell(new EnableNotification(true), getRef());
-
- subject.tell(
- new DataChanged(CompositeModel.createTestContext(),new MockDataChangedEvent()),
- getRef());
-
- final Boolean out = new ExpectMsg<Boolean>(duration("800 millis"), "dataChanged") {
- // do not put code outside this method, will run afterwards
- @Override
- protected Boolean match(Object in) {
- if (in != null && in.getClass().equals(DataChangedReply.class)) {
-
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(out);
- assertTrue(listener.gotIt());
- assertNotNull(listener.getChange().getCreatedData());
-
expectNoMsg();
+
+ Mockito.verify(mockListener, Mockito.never()).onDataChanged(
+ Mockito.any(AsyncDataChangeEvent.class));
}
};
}};
}
+ @SuppressWarnings({ "rawtypes", "unchecked" })
@Test
- public void testDataChangedWhenNotificationsAreDisabled(){
+ public void testDataChangedWithNoSender(){
new JavaTestKit(getSystem()) {{
- final MockDataChangeListener listener = new MockDataChangeListener();
- final Props props = DataChangeListener.props(listener);
- final ActorRef subject =
- getSystem().actorOf(props, "testDataChangedNotificationsDisabled");
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
+ final ActorRef subject = getSystem().actorOf(props, "testDataChangedWithNoSender");
+ // Let the DataChangeListener know that notifications should be enabled
+ subject.tell(new EnableNotification(true), ActorRef.noSender());
+
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ ActorRef.noSender());
+
+ getSystem().eventStream().subscribe(getRef(), DeadLetter.class);
new Within(duration("1 seconds")) {
@Override
protected void run() {
-
- subject.tell(
- new DataChanged(CompositeModel.createTestContext(),new MockDataChangedEvent()),
- getRef());
-
expectNoMsg();
}
};
package org.opendaylight.controller.cluster.datastore;
+import akka.actor.ActorRef;
import akka.actor.ActorSystem;
-import akka.event.Logging;
-import akka.testkit.JavaTestKit;
-
+import akka.actor.PoisonPill;
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.ListenableFuture;
-
-import junit.framework.Assert;
-
-import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Before;
+import com.google.common.util.concurrent.Uninterruptibles;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.md.cluster.datastore.model.PeopleModel;
import org.opendaylight.controller.md.cluster.datastore.model.SchemaContextHelper;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.concurrent.ExecutionException;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
+public class DistributedDataStoreIntegrationTest extends AbstractActorTest {
-public class DistributedDataStoreIntegrationTest {
+ @Test
+ public void testWriteTransactionWithSingleShard() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionIntegrationTest", "test-1");
- private static ActorSystem system;
+ testWriteTransaction(dataStore, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- @Before
- public void setUp() throws IOException {
- File journal = new File("journal");
+ testWriteTransaction(dataStore, TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
- if(journal.exists()) {
- FileUtils.deleteDirectory(journal);
- }
+ cleanup(dataStore);
+ }};
+ }
+ @Test
+ public void testWriteTransactionWithMultipleShards() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("testWriteTransactionWithMultipleShards", "cars-1", "people-1");
- System.setProperty("shard.persistent", "false");
- system = ActorSystem.create("test");
- }
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- @After
- public void tearDown() {
- JavaTestKit.shutdownActorSystem(system);
- system = null;
- }
+ YangInstanceIdentifier nodePath1 = CarsModel.BASE_PATH;
+ NormalizedNode<?, ?> nodeToWrite1 = CarsModel.emptyContainer();
+ writeTx.write(nodePath1, nodeToWrite1);
- protected ActorSystem getSystem() {
- return system;
- }
+ YangInstanceIdentifier nodePath2 = PeopleModel.BASE_PATH;
+ NormalizedNode<?, ?> nodeToWrite2 = PeopleModel.emptyContainer();
+ writeTx.write(nodePath2, nodeToWrite2);
- @Test
- public void integrationTest() throws Exception {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
- ShardStrategyFactory.setConfiguration(configuration);
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- new JavaTestKit(getSystem()) {
- {
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration,
- new DatastoreContext());
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(nodePath1).get();
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite1, optional.get());
- distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
+ optional = readTx.read(nodePath2).get();
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite2, optional.get());
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from("akka://test/user/shardmanager-config/member-1-shard-test-1-config")
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
+ cleanup(dataStore);
+ }};
+ }
- assertEquals(true, result);
+ @Test
+ public void testReadWriteTransaction() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("testReadWriteTransaction", "test-1");
- DOMStoreReadWriteTransaction transaction =
- distributedDataStore.newReadWriteTransaction();
+ // 1. Create a read-write Tx
- transaction
- .write(TestModel.TEST_PATH, ImmutableNodes
- .containerNode(TestModel.TEST_QNAME));
+ DOMStoreReadWriteTransaction readWriteTx = dataStore.newReadWriteTransaction();
+ assertNotNull("newReadWriteTransaction returned null", readWriteTx);
- ListenableFuture<Optional<NormalizedNode<?, ?>>>
- future =
- transaction.read(TestModel.TEST_PATH);
+ // 2. Write some data
- Optional<NormalizedNode<?, ?>> optional =
- future.get();
+ YangInstanceIdentifier nodePath = TestModel.TEST_PATH;
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ readWriteTx.write(nodePath, nodeToWrite );
- Assert.assertTrue("Node not found", optional.isPresent());
+ // 3. Read the data from Tx
- NormalizedNode<?, ?> normalizedNode =
- optional.get();
+ Boolean exists = readWriteTx.exists(nodePath).checkedGet(5, TimeUnit.SECONDS);
+ assertEquals("exists", true, exists);
- assertEquals(TestModel.TEST_QNAME,
- normalizedNode.getNodeType());
+ Optional<NormalizedNode<?, ?>> optional = readWriteTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
- DOMStoreThreePhaseCommitCohort ready =
- transaction.ready();
+ // 4. Ready the Tx for commit
- ListenableFuture<Boolean> canCommit =
- ready.canCommit();
+ DOMStoreThreePhaseCommitCohort cohort = readWriteTx.ready();
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 5. Commit the Tx
- ListenableFuture<Void> preCommit =
- ready.preCommit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- preCommit.get(5, TimeUnit.SECONDS);
+ // 6. Verify the data in the store
- ListenableFuture<Void> commit = ready.commit();
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
- commit.get(5, TimeUnit.SECONDS);
- } catch (ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ optional = readTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
+ cleanup(dataStore);
+ }};
}
@Test
- public void transactionChainIntegrationTest() throws Exception {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
- ShardStrategyFactory.setConfiguration(configuration);
+ public void testTransactionAbort() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionAbortIntegrationTest", "test-1");
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
+ writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- new JavaTestKit(getSystem()) {
- {
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration,
- new DatastoreContext());
+ cohort.canCommit().get(5, TimeUnit.SECONDS);
- distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
+ cohort.abort().get(5, TimeUnit.SECONDS);
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from("akka://test/user/shardmanager-config/member-1-shard-test-1-config")
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
+ testWriteTransaction(dataStore, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- assertEquals(true, result);
-
- DOMStoreTransactionChain transactionChain =
- distributedDataStore.createTransactionChain();
-
- DOMStoreReadWriteTransaction transaction =
- transactionChain.newReadWriteTransaction();
+ cleanup(dataStore);
+ }};
+ }
- transaction
- .write(TestModel.TEST_PATH, ImmutableNodes
- .containerNode(TestModel.TEST_QNAME));
+ @Test
+ public void testTransactionChain() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionChainIntegrationTest", "test-1");
- ListenableFuture<Optional<NormalizedNode<?, ?>>>
- future =
- transaction.read(TestModel.TEST_PATH);
+ // 1. Create a Tx chain and write-only Tx
- Optional<NormalizedNode<?, ?>> optional =
- future.get();
+ DOMStoreTransactionChain txChain = dataStore.createTransactionChain();
- Assert.assertTrue("Node not found", optional.isPresent());
+ DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- NormalizedNode<?, ?> normalizedNode =
- optional.get();
+ // 2. Write some data
- assertEquals(TestModel.TEST_QNAME,
- normalizedNode.getNodeType());
+ NormalizedNode<?, ?> containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ writeTx.write(TestModel.TEST_PATH, containerNode);
- DOMStoreThreePhaseCommitCohort ready =
- transaction.ready();
+ // 3. Ready the Tx for commit
- ListenableFuture<Boolean> canCommit =
- ready.canCommit();
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 4. Commit the Tx
- ListenableFuture<Void> preCommit =
- ready.preCommit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- preCommit.get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- ListenableFuture<Void> commit = ready.commit();
+ DOMStoreReadTransaction readTx = txChain.newReadOnlyTransaction();
- commit.get(5, TimeUnit.SECONDS);
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", containerNode, optional.get());
- transactionChain.close();
- } catch (ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ txChain.close();
+ cleanup(dataStore);
+ }};
}
+ class IntegrationTestKit extends ShardTestKit {
- //FIXME : Disabling test because it's flaky
- //@Test
- public void integrationTestWithMultiShardConfiguration()
- throws ExecutionException, InterruptedException, TimeoutException {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
-
- ShardStrategyFactory.setConfiguration(configuration);
-
- new JavaTestKit(getSystem()) {
- {
+ IntegrationTestKit(ActorSystem actorSystem) {
+ super(actorSystem);
+ }
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration, null);
+ DistributedDataStore setupDistributedDataStore(String typeName, String... shardNames) {
+ MockClusterWrapper cluster = new MockClusterWrapper();
+ Configuration config = new ConfigurationImpl("module-shards.conf", "modules.conf");
+ ShardStrategyFactory.setConfiguration(config);
+
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
+ DistributedDataStore dataStore = new DistributedDataStore(getSystem(), typeName, cluster,
+ config, datastoreContext);
+
+ SchemaContext schemaContext = SchemaContextHelper.full();
+ dataStore.onGlobalContextUpdated(schemaContext);
+
+ for(String shardName: shardNames) {
+ ActorRef shard = null;
+ for(int i = 0; i < 20 * 5 && shard == null; i++) {
+ Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
+ Optional<ActorRef> shardReply = dataStore.getActorContext().findLocalShard(shardName);
+ if(shardReply.isPresent()) {
+ shard = shardReply.get();
+ }
+ }
- distributedDataStore.onGlobalContextUpdated(
- SchemaContextHelper.full());
+ assertNotNull("Shard was not created", shard);
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(
- Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(
- "akka://test/user/shardmanager-config/member-1-shard-cars-1-config")
- .message(
- "Switching from state Candidate to Leader")
- .occurrences(1)
- .exec();
+ System.out.println("!!!!!!shard: "+shard.path().toString());
+ waitUntilLeader(shard);
+ }
- Thread.sleep(1000);
+ return dataStore;
+ }
+ void testWriteTransaction(DistributedDataStore dataStore, YangInstanceIdentifier nodePath,
+ NormalizedNode<?, ?> nodeToWrite) throws Exception {
- DOMStoreReadWriteTransaction transaction =
- distributedDataStore.newReadWriteTransaction();
+ // 1. Create a write-only Tx
- transaction.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
- transaction.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- DOMStoreThreePhaseCommitCohort ready = transaction.ready();
+ // 2. Write some data
- ListenableFuture<Boolean> canCommit = ready.canCommit();
+ writeTx.write(nodePath, nodeToWrite);
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 3. Ready the Tx for commit
- ListenableFuture<Void> preCommit = ready.preCommit();
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- preCommit.get(5, TimeUnit.SECONDS);
+ // 4. Commit the Tx
- ListenableFuture<Void> commit = ready.commit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- commit.get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- assertEquals(true, result);
- } catch(ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
+ }
+ void cleanup(DistributedDataStore dataStore) {
+ dataStore.getActorContext().getShardManager().tell(PoisonPill.getInstance(), null);
+ }
}
}
import akka.dispatch.ExecutionContexts;
import akka.dispatch.Futures;
import akka.util.Timeout;
+import com.google.common.base.Optional;
import com.google.common.util.concurrent.MoreExecutors;
import org.junit.After;
import org.junit.Before;
import scala.concurrent.ExecutionContextExecutor;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-
import java.util.concurrent.TimeUnit;
-
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNull;
import static org.junit.Assert.assertNotNull;
new DistributedDataStore(actorSystem, "config",
mock(ClusterWrapper.class), mock(Configuration.class),
- new DatastoreContext());
+ DatastoreContext.newBuilder().build());
verify(actorSystem).actorOf(any(Props.class), eq("shardmanager-config"));
}
ListenerRegistration registration =
distributedDataStore.registerChangeListener(TestModel.TEST_PATH, new AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>() {
- @Override
- public void onDataChanged(AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- throw new UnsupportedOperationException("onDataChanged");
- }
- }, AsyncDataBroker.DataChangeScope.BASE);
+ @Override
+ public void onDataChanged(AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
+ throw new UnsupportedOperationException("onDataChanged");
+ }
+ }, AsyncDataBroker.DataChangeScope.BASE);
// Since we do not expect the shard to be local registration will return a NoOpRegistration
assertTrue(registration instanceof NoOpDataChangeListenerRegistration);
Future future = mock(Future.class);
when(actorContext.getOperationDuration()).thenReturn(FiniteDuration.apply(5, TimeUnit.SECONDS));
when(actorContext.getActorSystem()).thenReturn(getSystem());
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(future);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(future);
ListenerRegistration registration =
distributedDataStore.registerChangeListener(TestModel.TEST_PATH,
when(actorSystem.dispatcher()).thenReturn(executor);
when(actorSystem.actorOf(any(Props.class))).thenReturn(doNothingActorRef);
when(actorContext.getActorSystem()).thenReturn(actorSystem);
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(f);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(f);
when(actorContext.actorSelection(any(ActorPath.class))).thenReturn(actorSelection);
ListenerRegistration registration =
when(actorSystem.dispatcher()).thenReturn(executor);
when(actorSystem.actorOf(any(Props.class))).thenReturn(doNothingActorRef);
when(actorContext.getActorSystem()).thenReturn(actorSystem);
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(f);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(f);
when(actorContext.actorSelection(any(ActorPath.class))).thenReturn(actorSelection);
ListenerRegistration registration =
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryNotFound;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
+import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
public class ShardManagerTest {
private static ActorSystem system;
+ Configuration mockConfig = new MockConfiguration();
+ private static ActorRef defaultShardMockActor;
@BeforeClass
public static void setUpClass() {
myJournal.put("class", "org.opendaylight.controller.cluster.datastore.ShardManagerTest$MyJournal");
myJournal.put("plugin-dispatcher", "akka.actor.default-dispatcher");
Config config = ConfigFactory.load()
- .withValue("akka.persistence.journal.plugin",
- ConfigValueFactory.fromAnyRef("my-journal"))
- .withValue("my-journal", ConfigValueFactory.fromMap(myJournal));
+ .withValue("akka.persistence.journal.plugin",
+ ConfigValueFactory.fromAnyRef("my-journal"))
+ .withValue("my-journal", ConfigValueFactory.fromMap(myJournal));
MyJournal.clear();
system = ActorSystem.create("test", config);
+
+ String name = new ShardIdentifier(Shard.DEFAULT_NAME, "member-1","config").toString();
+ defaultShardMockActor = system.actorOf(Props.create(DoNothingActor.class), name);
+
+
}
@AfterClass
new JavaTestKit(system) {
{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
subject.tell(new FindPrimary("inventory").toSerializable(), getRef());
expectMsgEquals(duration("2 seconds"),
- new PrimaryNotFound("inventory").toSerializable());
+ new PrimaryNotFound("inventory").toSerializable());
}};
}
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
subject.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ subject.tell(new ActorInitialized(), defaultShardMockActor);
subject.tell(new FindPrimary(Shard.DEFAULT_NAME).toSerializable(), getRef());
expectMsgClass(duration("1 seconds"), PrimaryFound.SERIALIZABLE_CLASS);
- }};
+ }
+ };
}
@Test
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", mockClusterWrapper,
- new MockConfiguration(), new DatastoreContext());
+ .props("config", mockClusterWrapper,
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
subject.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ subject.tell(new ActorInitialized(), defaultShardMockActor);
subject.tell(new FindLocalShard(Shard.DEFAULT_NAME), getRef());
}.get(); // this extracts the received message
assertTrue(out.path().toString(),
- out.path().toString().contains("member-1-shard-default-config"));
+ out.path().toString().contains("member-1-shard-default-config"));
}};
}
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
@Test
public void testOnRecoveryJournalIsEmptied(){
MyJournal.addToJournal(1L, new ShardManager.SchemaContextModules(
- ImmutableSet.of("foo")));
+ ImmutableSet.of("foo")));
assertEquals(1, MyJournal.get().size());
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final ActorRef subject = getSystem().actorOf(props);
public void testOnRecoveryPreviouslyKnownModulesAreDiscovered() throws Exception {
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
+ TestActorRef.create(system, props);
subject.underlyingActor().onReceiveRecover(new ShardManager.SchemaContextModules(ImmutableSet.of("foo")));
@Test
public void testOnUpdateSchemaContextUpdateKnownModulesIfTheyContainASuperSetOfTheKnownModules()
- throws Exception {
+ throws Exception {
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
+ TestActorRef.create(system, props);
Collection<String> knownModules = subject.underlyingActor().getKnownModules();
@Test
public void testOnUpdateSchemaContextDoNotUpdateKnownModulesIfTheyDoNotContainASuperSetOfKnownModules()
- throws Exception {
+ throws Exception {
new JavaTestKit(system) {{
final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ .props("config", new MockClusterWrapper(),
+ new MockConfiguration(), DatastoreContext.newBuilder().build());
final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
+ TestActorRef.create(system, props);
Collection<String> knownModules = subject.underlyingActor().getKnownModules();
}
@Override public Future<Void> doAsyncReplayMessages(final String persistenceId, long fromSequenceNr, long toSequenceNr, long max,
- final Procedure<PersistentRepr> replayCallback) {
+ final Procedure<PersistentRepr> replayCallback) {
if(journal.size() == 0){
return Futures.successful(null);
}
public Void call() throws Exception {
for (Map.Entry<Long, Object> entry : journal.entrySet()) {
PersistentRepr persistentMessage =
- new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId,
- false, null, null);
+ new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId,
+ false, null, null);
replayCallback.apply(persistentMessage);
}
return null;
}
@Override public Future<Void> doAsyncWriteMessages(
- final Iterable<PersistentRepr> persistentReprs) {
+ final Iterable<PersistentRepr> persistentReprs) {
return Futures.future(new Callable<Void>() {
@Override
public Void call() throws Exception {
}
@Override public Future<Void> doAsyncWriteConfirmations(
- Iterable<PersistentConfirmation> persistentConfirmations) {
+ Iterable<PersistentConfirmation> persistentConfirmations) {
return Futures.successful(null);
}
@Override public Future<Void> doAsyncDeleteMessages(Iterable<PersistentId> persistentIds,
- boolean b) {
+ boolean b) {
clear();
return Futures.successful(null);
}
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
import akka.actor.Props;
-import akka.event.Logging;
+import akka.dispatch.Dispatchers;
+import akka.dispatch.OnComplete;
import akka.japi.Creator;
+import akka.pattern.Patterns;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
+import akka.util.Timeout;
+import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.mockito.InOrder;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import scala.concurrent.duration.Duration;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.inOrder;
public class ShardTest extends AbstractActorTest {
- private static final DatastoreContext DATA_STORE_CONTEXT =
- new DatastoreContext("", null, Duration.create(10, TimeUnit.MINUTES), 5, 3, 5000, 500);
-
private static final SchemaContext SCHEMA_CONTEXT = TestModel.createTestContext();
private static final ShardIdentifier IDENTIFIER = ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
+ private static final AtomicInteger NEXT_SHARD_NUM = new AtomicInteger();
+
+ private static String shardName() {
+ return "shard" + NEXT_SHARD_NUM.getAndIncrement();
+ }
+
+ private DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
+ shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).build();
+
@Before
public void setUp() {
System.setProperty("shard.persistent", "false");
private Props newShardProps() {
return Shard.props(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT);
+ dataStoreContext, SCHEMA_CONTEXT);
}
@Test
Props props = Shard.props(identifier,
Collections.<ShardIdentifier, String>singletonMap(identifier, null),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT);
+ dataStoreContext, SCHEMA_CONTEXT);
final ActorRef subject = getSystem().actorOf(props, "testPeerAddressResolved");
new Within(duration("3 seconds")) {
NormalizedNodeToNodeCodec codec =
new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT);
- ref.underlyingActor().writeToStore(TestModel.TEST_PATH, ImmutableNodes.containerNode(
- TestModel.TEST_QNAME));
+ writeToStore(ref, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
YangInstanceIdentifier root = YangInstanceIdentifier.builder().build();
- NormalizedNode<?,?> expected = ref.underlyingActor().readStore(root);
+ NormalizedNode<?,?> expected = readStore(ref, root);
NormalizedNodeMessages.Container encode = codec.encode(expected);
ref.underlyingActor().onReceiveCommand(applySnapshot);
- NormalizedNode<?,?> actual = ref.underlyingActor().readStore(root);
+ NormalizedNode<?,?> actual = readStore(ref, root);
assertEquals(expected, actual);
}
shard.underlyingActor().onReceiveCommand(applyState);
- NormalizedNode<?,?> actual = shard.underlyingActor().readStore(TestModel.TEST_PATH);
+ NormalizedNode<?,?> actual = readStore(shard, TestModel.TEST_PATH);
assertEquals("Applied state", node, actual);
}
@Override
public Shard create() throws Exception {
return new Shard(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT) {
+ dataStoreContext, SCHEMA_CONTEXT) {
@Override
protected void onRecoveryComplete() {
try {
// Verify data in the data store.
- NormalizedNode<?, ?> outerList = shard.underlyingActor().readStore(TestModel.OUTER_LIST_PATH);
+ NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
outerList.getValue() instanceof Iterable);
return new CompositeModificationPayload(compMod.toSerializable());
}
- @SuppressWarnings("unchecked")
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
+ InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
+ MutableCompositeModification modification) {
+ return setupMockWriteTransaction(cohortName, dataStore, path, data, modification, null);
+ }
+
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
+ InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
+ MutableCompositeModification modification,
+ final Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit) {
+
+ DOMStoreWriteTransaction tx = dataStore.newWriteOnlyTransaction();
+ tx.write(path, data);
+ final DOMStoreThreePhaseCommitCohort realCohort = tx.ready();
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, cohortName);
+
+ doAnswer(new Answer<ListenableFuture<Boolean>>() {
+ @Override
+ public ListenableFuture<Boolean> answer(InvocationOnMock invocation) {
+ return realCohort.canCommit();
+ }
+ }).when(cohort).canCommit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ if(preCommit != null) {
+ return preCommit.apply(realCohort);
+ } else {
+ return realCohort.preCommit();
+ }
+ }
+ }).when(cohort).preCommit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ return realCohort.commit();
+ }
+ }).when(cohort).commit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ return realCohort.abort();
+ }
+ }).when(cohort).abort();
+
+ modification.addModification(new WriteModification(path, data, SCHEMA_CONTEXT));
+
+ return cohort;
+ }
+
+ @SuppressWarnings({ "unchecked" })
@Test
- public void testForwardedCommitTransactionWithPersistence() throws IOException {
+ public void testConcurrentThreePhaseCommits() throws Throwable {
System.setProperty("shard.persistent", "true");
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ // Setup 3 simulated transactions with mock cohorts backed by real cohorts.
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
+ TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
+ modification2);
+
+ String transactionID3 = "tx3";
+ MutableCompositeModification modification3 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore,
+ YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1),
+ modification3);
+
+ long timeoutSec = 5;
+ final FiniteDuration duration = FiniteDuration.create(timeoutSec, TimeUnit.SECONDS);
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction message for the first Tx that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
+
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the ForwardedReadyTransaction for the next 2 Tx's.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message for the next 2 Tx's. These should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture1 = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ Future<Object> canCommitFuture2 = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID3).toSerializable(), timeout);
+
+ // Send the CommitTransaction message for the first Tx. After it completes, it should
+ // trigger the 2nd Tx to proceed which should in turn then trigger the 3rd.
+
+ shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Wait for the next 2 Tx's to complete.
+
+ final AtomicReference<Throwable> caughtEx = new AtomicReference<>();
+ final CountDownLatch commitLatch = new CountDownLatch(2);
+
+ class OnFutureComplete extends OnComplete<Object> {
+ private final Class<?> expRespType;
+
+ OnFutureComplete(Class<?> expRespType) {
+ this.expRespType = expRespType;
+ }
+
+ @Override
+ public void onComplete(Throwable error, Object resp) {
+ if(error != null) {
+ System.out.println(new java.util.Date()+": "+getClass().getSimpleName() + " failure: "+error);
+ caughtEx.set(new AssertionError(getClass().getSimpleName() + " failure", error));
+ } else {
+ try {
+ assertEquals("Commit response type", expRespType, resp.getClass());
+ onSuccess(resp);
+ } catch (Exception e) {
+ caughtEx.set(e);
+ }
+ }
+ }
+
+ void onSuccess(Object resp) throws Exception {
+ }
+ }
+
+ class OnCommitFutureComplete extends OnFutureComplete {
+ OnCommitFutureComplete() {
+ super(CommitTransactionReply.SERIALIZABLE_CLASS);
+ }
+
+ @Override
+ public void onComplete(Throwable error, Object resp) {
+ super.onComplete(error, resp);
+ commitLatch.countDown();
+ }
+ }
+
+ class OnCanCommitFutureComplete extends OnFutureComplete {
+ private final String transactionID;
+ OnCanCommitFutureComplete(String transactionID) {
+ super(CanCommitTransactionReply.SERIALIZABLE_CLASS);
+ this.transactionID = transactionID;
+ }
+
+ @Override
+ void onSuccess(Object resp) throws Exception {
+ CanCommitTransactionReply canCommitReply =
+ CanCommitTransactionReply.fromSerializable(resp);
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ Future<Object> commitFuture = Patterns.ask(shard,
+ new CommitTransaction(transactionID).toSerializable(), timeout);
+ commitFuture.onComplete(new OnCommitFutureComplete(), getSystem().dispatcher());
+ }
+ }
+
+ canCommitFuture1.onComplete(new OnCanCommitFutureComplete(transactionID2),
+ getSystem().dispatcher());
+
+ canCommitFuture2.onComplete(new OnCanCommitFutureComplete(transactionID3),
+ getSystem().dispatcher());
+
+ boolean done = commitLatch.await(timeoutSec, TimeUnit.SECONDS);
+
+ if(caughtEx.get() != null) {
+ throw caughtEx.get();
+ }
+
+ assertEquals("Commits complete", true, done);
+
+ InOrder inOrder = inOrder(cohort1, cohort2, cohort3);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort1).preCommit();
+ inOrder.verify(cohort1).commit();
+ inOrder.verify(cohort2).canCommit();
+ inOrder.verify(cohort2).preCommit();
+ inOrder.verify(cohort2).commit();
+ inOrder.verify(cohort3).canCommit();
+ inOrder.verify(cohort3).preCommit();
+ inOrder.verify(cohort3).commit();
+
+ // Verify data in the data store.
+
+ NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
+ assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
+ outerList.getValue() instanceof Iterable);
+ Object entry = ((Iterable<Object>)outerList.getValue()).iterator().next();
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode",
+ entry instanceof MapEntryNode);
+ MapEntryNode mapEntry = (MapEntryNode)entry;
+ Optional<DataContainerChild<? extends PathArgument, ?>> idLeaf =
+ mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME));
+ assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
+ assertEquals(TestModel.ID_QNAME.getLocalName() + " value", 1, idLeaf.get().getValue());
+
+ assertEquals("Last log index", 2, shard.underlyingActor().getShardMBean().getLastLogIndex());
+ }};
+ }
+
+ @Test
+ public void testCommitPhaseFailure() throws Throwable {
new ShardTestKit(getSystem()) {{
- TestActorRef<Shard> shard = TestActorRef.create(getSystem(), newShardProps());
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
waitUntilLeader(shard);
- NormalizedNode<?, ?> node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ // Setup 2 simulated transactions with mock cohorts. The first one fails in the
+ // commit phase.
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(cohort1).preCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort1).commit();
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class);
- doReturn(Futures.immediateFuture(null)).when(cohort).commit();
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ // Send the CommitTransaction message for the first Tx. This should send back an error
+ // and trigger the 2nd Tx to proceed.
+
+ shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+
+ // Wait for the 2nd Tx to complete the canCommit phase.
+
+ final CountDownLatch latch = new CountDownLatch(1);
+ canCommitFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable t, Object resp) {
+ latch.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS));
+
+ InOrder inOrder = inOrder(cohort1, cohort2);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort1).preCommit();
+ inOrder.verify(cohort1).commit();
+ inOrder.verify(cohort2).canCommit();
+ }};
+ }
+
+ @Test
+ public void testPreCommitPhaseFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ String transactionID = "tx1";
MutableCompositeModification modification = new MutableCompositeModification();
- modification.addModification(new WriteModification(TestModel.TEST_PATH, node,
- SCHEMA_CONTEXT));
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).preCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
- shard.tell(new ForwardedCommitTransaction(cohort, modification), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- expectMsgClass(duration("5 seconds"), CommitTransactionReply.SERIALIZABLE_CLASS);
+ // Send the CanCommitTransaction message.
- verify(cohort).commit();
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
- assertEquals("Last log index", 0, shard.underlyingActor().getShardMBean().getLastLogIndex());
+ // Send the CommitTransaction message. This should send back an error
+ // for preCommit failure.
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+
+ InOrder inOrder = inOrder(cohort);
+ inOrder.verify(cohort).canCommit();
+ inOrder.verify(cohort).preCommit();
+ }};
+ }
+
+ @Test
+ public void testCanCommitPhaseFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ String transactionID = "tx1";
+ MutableCompositeModification modification = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).canCommit();
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testAbortBeforeFinishCommit() throws Throwable {
+ System.setProperty("shard.persistent", "true");
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ final String transactionID = "tx1";
+ final CountDownLatch abortComplete = new CountDownLatch(1);
+ Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit =
+ new Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> apply(final DOMStoreThreePhaseCommitCohort cohort) {
+ ListenableFuture<Void> preCommitFuture = cohort.preCommit();
+
+ Future<Object> abortFuture = Patterns.ask(shard,
+ new AbortTransaction(transactionID).toSerializable(), timeout);
+ abortFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable e, Object resp) {
+ abortComplete.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ return preCommitFuture;
+ }
+ };
+
+ MutableCompositeModification modification = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME),
+ modification, preCommit);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ Future<Object> commitFuture = Patterns.ask(shard,
+ new CommitTransaction(transactionID).toSerializable(), timeout);
+
+ assertEquals("Abort complete", true, abortComplete.await(5, TimeUnit.SECONDS));
+
+ Await.result(commitFuture, duration);
+
+ NormalizedNode<?, ?> node = readStore(shard, TestModel.TEST_PATH);
+ assertNotNull(TestModel.TEST_QNAME.getLocalName() + " not found", node);
+ }};
+ }
+
+ @Test
+ public void testTransactionCommitTimeout() throws Throwable {
+ dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitTimeoutInSeconds(1).build();
+
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+ writeToStore(shard, TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
+
+ // Create 1st Tx - will timeout
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1),
+ modification1);
+
+ // Create 2nd Tx
+
+ String transactionID2 = "tx3";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ YangInstanceIdentifier listNodePath = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2).build();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort3", dataStore,
+ listNodePath,
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2),
+ modification2);
+
+ // Ready the Tx's
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit 1st Tx. We don't send the commit so it should timeout.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit the 2nd Tx - it should complete after the 1st Tx times out.
+
+ shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Commit the 2nd Tx.
+
+ shard.tell(new CommitTransaction(transactionID2).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ NormalizedNode<?, ?> node = readStore(shard, listNodePath);
+ assertNotNull(listNodePath + " not found", node);
+ }};
+ }
+
+ @Test
+ public void testTransactionCommitQueueCapacityExceeded() throws Throwable {
+ dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitQueueCapacity(1).build();
+
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
+ TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
+ modification2);
+
+ String transactionID3 = "tx3";
+ MutableCompositeModification modification3 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification3);
+
+ // Ready the Tx's
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit 1st Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit the 2nd Tx - it should get queued.
+
+ shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef());
+
+ // canCommit the 3rd Tx - should exceed queue capacity and fail.
+
+ shard.tell(new CanCommitTransaction(transactionID3).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testCanCommitBeforeReadyFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ shard.tell(new CanCommitTransaction("tx").toSerializable(), getRef());
+ expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testAbortTransaction() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ // Setup 2 simulated transactions with mock cohorts. The first one will be aborted.
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(cohort1).abort();
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ // Send the AbortTransaction message for the first Tx. This should trigger the 2nd
+ // Tx to proceed.
+
+ shard.tell(new AbortTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, AbortTransactionReply.SERIALIZABLE_CLASS);
+
+ // Wait for the 2nd Tx to complete the canCommit phase.
+
+ final CountDownLatch latch = new CountDownLatch(1);
+ canCommitFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable t, Object resp) {
+ latch.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS));
+
+ InOrder inOrder = inOrder(cohort1, cohort2);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort2).canCommit();
}};
}
@Test
public void testCreateSnapshot() throws IOException, InterruptedException {
new ShardTestKit(getSystem()) {{
- final ActorRef subject = getSystem().actorOf(newShardProps(), "testCreateSnapshot");
+ final AtomicReference<CountDownLatch> latch = new AtomicReference<>(new CountDownLatch(1));
+ Creator<Shard> creator = new Creator<Shard>() {
+ @Override
+ public Shard create() throws Exception {
+ return new Shard(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
+ dataStoreContext, SCHEMA_CONTEXT) {
+ @Override
+ public void saveSnapshot(Object snapshot) {
+ super.saveSnapshot(snapshot);
+ latch.get().countDown();
+ }
+ };
+ }
+ };
- waitUntilLeader(subject);
+ TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ Props.create(new DelegatingShardCreator(creator)), "testCreateSnapshot");
- subject.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ waitUntilLeader(shard);
+
+ shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
- waitForLogMessage(Logging.Info.class, subject, "CaptureSnapshotReply received by actor");
+ assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
- subject.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ latch.set(new CountDownLatch(1));
+ shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
- waitForLogMessage(Logging.Info.class, subject, "CaptureSnapshotReply received by actor");
+ assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
}};
}
};
}
- private static final class DelegatingShardCreator implements Creator<Shard> {
- private final Creator<Shard> delegate;
+ private NormalizedNode<?,?> readStore(TestActorRef<Shard> shard, YangInstanceIdentifier id)
+ throws ExecutionException, InterruptedException {
+ DOMStoreReadTransaction transaction = shard.underlyingActor().getDataStore().newReadOnlyTransaction();
- DelegatingShardCreator(Creator<Shard> delegate) {
- this.delegate = delegate;
- }
+ CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> future =
+ transaction.read(id);
- @Override
- public Shard create() throws Exception {
- return delegate.create();
- }
+ Optional<NormalizedNode<?, ?>> optional = future.get();
+ NormalizedNode<?, ?> node = optional.isPresent()? optional.get() : null;
+
+ transaction.close();
+
+ return node;
}
- private static class ShardTestKit extends JavaTestKit {
+ private void writeToStore(TestActorRef<Shard> shard, YangInstanceIdentifier id, NormalizedNode<?,?> node)
+ throws ExecutionException, InterruptedException {
+ DOMStoreWriteTransaction transaction = shard.underlyingActor().getDataStore().newWriteOnlyTransaction();
- private ShardTestKit(ActorSystem actorSystem) {
- super(actorSystem);
- }
+ transaction.write(id, node);
- protected void waitForLogMessage(final Class logLevel, ActorRef subject, String logMessage){
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(logLevel
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(subject.path().toString())
- .message(logMessage)
- .occurrences(1).exec();
+ DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
+ commitCohort.preCommit().get();
+ commitCohort.commit().get();
+ }
- Assert.assertEquals(true, result);
+ private static final class DelegatingShardCreator implements Creator<Shard> {
+ private final Creator<Shard> delegate;
+ DelegatingShardCreator(Creator<Shard> delegate) {
+ this.delegate = delegate;
}
- protected void waitUntilLeader(ActorRef subject) {
- waitForLogMessage(Logging.Info.class, subject,
- "Switching from state Candidate to Leader");
+ @Override
+ public Shard create() throws Exception {
+ return delegate.create();
}
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore;
+
+import java.util.concurrent.TimeUnit;
+import org.junit.Assert;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeader;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeaderReply;
+import com.google.common.util.concurrent.Uninterruptibles;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.pattern.Patterns;
+import akka.testkit.JavaTestKit;
+import akka.util.Timeout;
+
+class ShardTestKit extends JavaTestKit {
+
+ ShardTestKit(ActorSystem actorSystem) {
+ super(actorSystem);
+ }
+
+ protected void waitForLogMessage(final Class logLevel, ActorRef subject, String logMessage){
+ // Wait for a specific log message to show up
+ final boolean result =
+ new JavaTestKit.EventFilter<Boolean>(logLevel
+ ) {
+ @Override
+ protected Boolean run() {
+ return true;
+ }
+ }.from(subject.path().toString())
+ .message(logMessage)
+ .occurrences(1).exec();
+
+ Assert.assertEquals(true, result);
+
+ }
+
+ protected void waitUntilLeader(ActorRef shard) {
+ for(int i = 0; i < 20 * 5; i++) {
+ Future<Object> future = Patterns.ask(shard, new FindLeader(), new Timeout(5, TimeUnit.SECONDS));
+ try {
+ FindLeaderReply resp = (FindLeaderReply)Await.result(future, Duration.create(5, TimeUnit.SECONDS));
+ if(resp.getLeaderActor() != null) {
+ return;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
+ }
+
+ Assert.fail("Leader not found for shard " + shard.path());
+ }
+}
\ No newline at end of file
import akka.actor.ActorRef;
import akka.actor.Props;
import akka.testkit.TestActorRef;
-
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
-
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
-
import java.util.Collections;
import java.util.concurrent.TimeUnit;
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("operational").build();
- private final DatastoreContext datastoreContext = new DatastoreContext();
+ private final DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
}
private ActorRef createShard(){
- return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, new DatastoreContext(), TestModel.createTestContext()));
+ return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext,
+ TestModel.createTestContext()));
}
@Test(expected = ReadFailedException.class)
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props, "testNegativeMergeTransactionReady");
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
import akka.actor.Terminated;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
-
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
-
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException;
import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.duration.Duration;
-
import java.util.Collections;
import java.util.concurrent.TimeUnit;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- private DatastoreContext datastoreContext = new DatastoreContext();
+ private DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
private ActorRef createShard(){
return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER,
- Collections.EMPTY_MAP, new DatastoreContext(), TestModel.createTestContext()));
+ Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()));
}
@Test
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testReadData");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testReadDataWhenDataNotFound");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testDataExistsPositive");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testDataExistsNegative");
new Within(duration("1 seconds")) {
private void assertModification(final ActorRef subject,
final Class<? extends Modification> modificationType) {
new JavaTestKit(getSystem()) {{
- new Within(duration("1 seconds")) {
+ new Within(duration("3 seconds")) {
@Override
protected void run() {
subject
- .tell(new ShardTransaction.GetCompositedModification(),
+ .tell(new ShardWriteTransaction.GetCompositedModification(),
getRef());
final CompositeModification compositeModification =
- new ExpectMsg<CompositeModification>(duration("1 seconds"), "match hint") {
+ new ExpectMsg<CompositeModification>(duration("3 seconds"), "match hint") {
// do not put code outside this method, will run afterwards
@Override
protected CompositeModification match(Object in) {
- if (in instanceof ShardTransaction.GetCompositeModificationReply) {
- return ((ShardTransaction.GetCompositeModificationReply) in)
+ if (in instanceof ShardWriteTransaction.GetCompositeModificationReply) {
+ return ((ShardWriteTransaction.GetCompositeModificationReply) in)
.getModification();
} else {
throw noMatch();
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testWriteData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testMergeData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testDeleteData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testReadyTransaction");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testCloseTransaction");
public void testNegativePerformingWriteOperationOnReadTransaction() throws Exception {
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef subject = TestActorRef.apply(props,getSystem());
subject.receive(new DeleteData(TestModel.TEST_PATH).toSerializable(), ActorRef.noSender());
@Test
public void testShardTransactionInactivity() {
- datastoreContext = new DatastoreContext("Test",
- InMemoryDOMDataStoreConfigProperties.getDefault(),
- Duration.create(500, TimeUnit.MILLISECONDS), 5, 1000, 1000, 500);
+ datastoreContext = DatastoreContext.newBuilder().shardTransactionIdleTimeout(
+ Duration.create(500, TimeUnit.MILLISECONDS)).build();
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testShardTransactionInactivity");
+++ /dev/null
-/*
- *
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- *
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorRef;
-import akka.actor.Props;
-import akka.testkit.TestActorRef;
-import akka.util.Timeout;
-
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
-import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.Modification;
-import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
-import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
-import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.Duration;
-import scala.concurrent.duration.FiniteDuration;
-
-import java.util.Collections;
-import java.util.concurrent.TimeUnit;
-
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.when;
-
-
-public class ThreePhaseCommitCohortFailureTest extends AbstractActorTest {
-
- private static ListeningExecutorService storeExecutor =
- MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor());
-
- private static final InMemoryDOMDataStore store =
- new InMemoryDOMDataStore("OPER", storeExecutor,
- MoreExecutors.sameThreadExecutor());
-
- private static final SchemaContext testSchemaContext =
- TestModel.createTestContext();
-
- private static final ShardIdentifier SHARD_IDENTIFIER =
- ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config").build();
-
- private final DatastoreContext datastoreContext = new DatastoreContext();
-
- private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
-
- @BeforeClass
- public static void staticSetup() {
- store.onGlobalContextUpdated(testSchemaContext);
- }
-
- private final FiniteDuration ASK_RESULT_DURATION = Duration.create(5000, TimeUnit.MILLISECONDS);
-
- private ActorRef createShard(){
- return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()));
- }
-
- @Test(expected = TestException.class)
- public void testNegativeAbortResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativeAbortResultsInException");
-
- when(mockCohort.abort()).thenReturn(
- Futures.<Void>immediateFailedFuture(new TestException()));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder()
- .build(), 3000);
- assertTrue(future.isCompleted());
-
- Await.result(future, ASK_RESULT_DURATION);
- }
-
-
- @Test(expected = OptimisticLockFailedException.class)
- public void testNegativeCanCommitResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativeCanCommitResultsInException");
-
- when(mockCohort.canCommit()).thenReturn(
- Futures
- .<Boolean>immediateFailedFuture(
- new OptimisticLockFailedException("some exception")));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder()
- .build(), 3000);
-
-
- Await.result(future, ASK_RESULT_DURATION);
-
- }
-
-
- @Test(expected = TestException.class)
- public void testNegativePreCommitResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativePreCommitResultsInException");
-
- when(mockCohort.preCommit()).thenReturn(
- Futures
- .<Void>immediateFailedFuture(
- new TestException()));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.PreCommitTransaction.newBuilder()
- .build(), 3000);
-
- Await.result(future, ASK_RESULT_DURATION);
-
- }
-
- @Test(expected = TestException.class)
- public void testNegativeCommitResultsInException() throws Exception {
-
- final TestActorRef<Shard> subject = TestActorRef.create(getSystem(),
- Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()),
- "testNegativeCommitResultsInException");
-
- final ActorRef shardTransaction =
- getSystem().actorOf(ShardTransaction.props(store.newReadWriteTransaction(), subject,
- testSchemaContext, datastoreContext, shardStats));
-
- ShardTransactionMessages.WriteData writeData =
- ShardTransactionMessages.WriteData.newBuilder()
- .setInstanceIdentifierPathArguments(
- NormalizedNodeMessages.InstanceIdentifier.newBuilder()
- .build()).setNormalizedNode(
- NormalizedNodeMessages.Node.newBuilder().build()
-
- ).build();
-
- Timeout askTimeout = new Timeout(ASK_RESULT_DURATION);
-
- //This is done so that Modification list is updated which is used during commit
- Future<Object> future = akka.pattern.Patterns.ask(shardTransaction, writeData, askTimeout);
-
- //ready transaction creates the cohort so that we get into the
- //block where in commmit is done
- ShardTransactionMessages.ReadyTransaction readyTransaction =
- ShardTransactionMessages.ReadyTransaction.newBuilder().build();
-
- future = akka.pattern.Patterns.ask(shardTransaction, readyTransaction, askTimeout);
-
- //but when the message is sent it will have the MockCommit object
- //so that we can simulate throwing of exception
- ForwardedCommitTransaction mockForwardCommitTransaction =
- Mockito.mock(ForwardedCommitTransaction.class);
- DOMStoreThreePhaseCommitCohort mockThreePhaseCommitTransaction =
- Mockito.mock(DOMStoreThreePhaseCommitCohort.class);
- when(mockForwardCommitTransaction.getCohort())
- .thenReturn(mockThreePhaseCommitTransaction);
- when(mockThreePhaseCommitTransaction.commit()).thenReturn(Futures
- .<Void>immediateFailedFuture(
- new TestException()));
- Modification mockModification = Mockito.mock(
- Modification.class);
- when(mockForwardCommitTransaction.getModification())
- .thenReturn(mockModification);
-
- when(mockModification.toSerializable()).thenReturn(
- PersistentMessages.CompositeModification.newBuilder().build());
-
- future = akka.pattern.Patterns.ask(subject, mockForwardCommitTransaction, askTimeout);
- Await.result(future, ASK_RESULT_DURATION);
- }
-
- private class TestException extends Exception {
- }
-}
import akka.actor.ActorSelection;
import akka.actor.Props;
import akka.dispatch.Futures;
-
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.isA;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.times;
-
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.opendaylight.controller.cluster.datastore.messages.SerializableMessage;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
-
import scala.concurrent.Future;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.isA;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
public class ThreePhaseCommitCohortProxyTest extends AbstractActorTest {
@SuppressWarnings("serial")
doReturn(getSystem()).when(actorContext).getActorSystem();
}
- private Future<ActorPath> newCohortPath() {
+ private Future<ActorSelection> newCohort() {
ActorPath path = getSystem().actorOf(Props.create(DoNothingActor.class)).path();
- doReturn(mock(ActorSelection.class)).when(actorContext).actorSelection(path);
- return Futures.successful(path);
+ ActorSelection actorSelection = getSystem().actorSelection(path);
+ return Futures.successful(actorSelection);
}
private final ThreePhaseCommitCohortProxy setupProxy(int nCohorts) throws Exception {
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
for(int i = 1; i <= nCohorts; i++) {
- cohortPathFutures.add(newCohortPath());
+ cohortFutures.add(newCohort());
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures, "txn-1");
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures, "txn-1");
}
private ThreePhaseCommitCohortProxy setupProxyWithFailedCohortPath()
throws Exception {
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
- cohortPathFutures.add(newCohortPath());
- cohortPathFutures.add(Futures.<ActorPath>failed(new TestException()));
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
+ cohortFutures.add(newCohort());
+ cohortFutures.add(Futures.<ActorSelection>failed(new TestException()));
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures, "txn-1");
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures, "txn-1");
}
private void setupMockActorContext(Class<?> requestType, Object... responses) {
.successful(((SerializableMessage) responses[i]).toSerializable()));
}
- stubber.when(actorContext).executeRemoteOperationAsync(any(ActorSelection.class),
+ stubber.when(actorContext).executeOperationAsync(any(ActorSelection.class),
isA(requestType));
}
private void verifyCohortInvocations(int nCohorts, Class<?> requestType) {
- verify(actorContext, times(nCohorts)).executeRemoteOperationAsync(
+ verify(actorContext, times(nCohorts)).executeOperationAsync(
any(ActorSelection.class), isA(requestType));
}
@Test
public void testPreCommit() throws Exception {
+ // Precommit is currently a no-op
ThreePhaseCommitCohortProxy proxy = setupProxy(1);
setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
new PreCommitTransactionReply());
proxy.preCommit().get(5, TimeUnit.SECONDS);
-
- verifyCohortInvocations(1, PreCommitTransaction.SERIALIZABLE_CLASS);
- }
-
- @Test(expected = ExecutionException.class)
- public void testPreCommitWithFailure() throws Exception {
- ThreePhaseCommitCohortProxy proxy = setupProxy(2);
-
- setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
- new PreCommitTransactionReply(), new RuntimeException("mock"));
-
- proxy.preCommit().get(5, TimeUnit.SECONDS);
}
@Test
proxy.commit().get(5, TimeUnit.SECONDS);
verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
- verifyCohortInvocations(2, PreCommitTransaction.SERIALIZABLE_CLASS);
verifyCohortInvocations(2, CommitTransaction.SERIALIZABLE_CLASS);
}
}
package org.opendaylight.controller.cluster.datastore;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
-import akka.actor.ActorPath;
+import com.google.common.util.concurrent.CheckedFuture;
+
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.Props;
import akka.dispatch.Futures;
-
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
-
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
-
import org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
+
import java.util.List;
import java.util.concurrent.TimeUnit;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.eq;
-import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.isA;
import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
@SuppressWarnings("resource")
public class TransactionProxyTest extends AbstractActorTest {
return argThat(matcher);
}
- private Future<Object> readyTxReply(ActorPath path) {
+ private Future<Object> readyTxReply(String path) {
return Futures.successful((Object)new ReadyTransactionReply(path).toSerializable());
}
ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
doReturn(getSystem().actorSelection(actorRef.path())).
when(mockActorContext).actorSelection(actorRef.path().toString());
+
+ doReturn(Optional.of(getSystem().actorSelection(actorRef.path()))).
+ when(mockActorContext).findPrimaryShard(eq(DefaultShardStrategy.DEFAULT_SHARD));
+
doReturn(createTransactionReply(actorRef)).when(mockActorContext).
- executeShardOperation(eq(DefaultShardStrategy.DEFAULT_SHARD),
+ executeOperation(eq(getSystem().actorSelection(actorRef.path())),
eqCreateTransaction(memberName, type));
- doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(
- anyString(), eq(actorRef.path().toString()));
- doReturn(actorRef.path()).when(mockActorContext).actorFor(actorRef.path().toString());
-
return actorRef;
}
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
Optional<NormalizedNode<?, ?>> readOptional = transactionProxy.read(
NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
readOptional = transactionProxy.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
private void testExceptionOnInitialCreateTransaction(Exception exToThrow, Invoker invoker)
throws Throwable {
+ ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
- doThrow(exToThrow).when(mockActorContext).executeShardOperation(
- anyString(), any());
+ if (exToThrow instanceof PrimaryNotFoundException) {
+ doReturn(Optional.absent()).when(mockActorContext).findPrimaryShard(anyString());
+ } else {
+ doReturn(Optional.of(getSystem().actorSelection(actorRef.path()))).
+ when(mockActorContext).findPrimaryShard(anyString());
+ }
+ doThrow(exToThrow).when(mockActorContext).executeOperation(any(ActorSelection.class), any());
- TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
- READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext, READ_ONLY);
propagateReadFailedExceptionCause(invoker.invoke(transactionProxy));
}
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
+ executeOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
try {
propagateReadFailedExceptionCause(transactionProxy.read(TestModel.TEST_PATH));
} finally {
- verify(mockActorContext, times(0)).executeRemoteOperationAsync(
+ verify(mockActorContext, times(0)).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
}
}
NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(expectedNode));
- doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
- doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
Boolean exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
assertEquals("Exists response", false, exists);
- doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
+ executeOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
- doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
try {
propagateReadFailedExceptionCause(transactionProxy.exists(TestModel.TEST_PATH));
} finally {
- verify(mockActorContext, times(0)).executeRemoteOperationAsync(
+ verify(mockActorContext, times(0)).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
}
}
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.merge(TestModel.TEST_PATH, nodeToWrite);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
public void testDelete() throws Exception {
ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- doReturn(deleteDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(deleteDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDeleteData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.delete(TestModel.TEST_PATH);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDeleteData());
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
DeleteDataReply.SERIALIZABLE_CLASS);
}
- private void verifyCohortPathFutures(ThreePhaseCommitCohortProxy proxy,
- Object... expReplies) throws Exception {
+ private void verifyCohortFutures(ThreePhaseCommitCohortProxy proxy,
+ Object... expReplies) throws Exception {
assertEquals("getReadyOperationFutures size", expReplies.length,
- proxy.getCohortPathFutures().size());
+ proxy.getCohortFutures().size());
int i = 0;
- for( Future<ActorPath> future: proxy.getCohortPathFutures()) {
+ for( Future<ActorSelection> future: proxy.getCohortFutures()) {
assertNotNull("Ready operation Future is null", future);
Object expReply = expReplies[i++];
- if(expReply instanceof ActorPath) {
- ActorPath actual = Await.result(future, Duration.create(5, TimeUnit.SECONDS));
- assertEquals("Cohort actor path", expReply, actual);
+ if(expReply instanceof ActorSelection) {
+ ActorSelection actual = Await.result(future, Duration.create(5, TimeUnit.SECONDS));
+ assertEquals("Cohort actor path", (ActorSelection) expReply, actual);
} else {
// Expecting exception.
try {
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readyTxReply(actorRef.path().toString())).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
WriteDataReply.SERIALIZABLE_CLASS);
- verifyCohortPathFutures(proxy, actorRef.path());
+ verifyCohortFutures(proxy, getSystem().actorSelection(actorRef.path()));
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
+ executeOperationAsync(eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readyTxReply(actorRef.path().toString())).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
MergeDataReply.SERIALIZABLE_CLASS, TestException.class);
- verifyCohortPathFutures(proxy, TestException.class);
+ verifyCohortFutures(proxy, TestException.class);
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)),
+ executeOperationAsync(eq(actorSelection(actorRef)),
isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
MergeDataReply.SERIALIZABLE_CLASS);
- verifyCohortPathFutures(proxy, TestException.class);
+ verifyCohortFutures(proxy, TestException.class);
}
@Test
public void testReadyWithInitialCreateTransactionFailure() throws Exception {
- doThrow(new PrimaryNotFoundException("mock")).when(mockActorContext).executeShardOperation(
- anyString(), any());
+ doReturn(Optional.absent()).when(mockActorContext).findPrimaryShard(anyString());
+// doThrow(new PrimaryNotFoundException("mock")).when(mockActorContext).executeShardOperation(
+// anyString(), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
WRITE_ONLY);
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- verifyCohortPathFutures(proxy, PrimaryNotFoundException.class);
+ verifyCohortFutures(proxy, PrimaryNotFoundException.class);
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)),
+ executeOperationAsync(eq(actorSelection(actorRef)),
isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- verifyCohortPathFutures(proxy, IllegalArgumentException.class);
+ verifyCohortFutures(proxy, IllegalArgumentException.class);
}
@Test
public void testClose() throws Exception{
ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.close();
- verify(mockActorContext).sendRemoteOperationAsync(
+ verify(mockActorContext).sendOperationAsync(
eq(actorSelection(actorRef)), isA(CloseTransaction.SERIALIZABLE_CLASS));
}
}
assertEquals("member-1-shard-inventory-config", id.toString());
}
+ @Test
+ public void testFromShardIdString(){
+ String shardIdStr = "member-1-shard-inventory-config";
+
+ ShardIdentifier id = ShardIdentifier.builder().fromShardIdString(shardIdStr).build();
+ assertEquals("member-1", id.getMemberName());
+ assertEquals("inventory", id.getShardName());
+ assertEquals("config", id.getType());
+ }
}
package org.opendaylight.controller.cluster.datastore.utils;
-import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
-import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.japi.Creator;
import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.AbstractActorTest;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardNotFound;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
+
+import java.util.concurrent.TimeUnit;
+
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public class ActorContextTest extends AbstractActorTest{
- @Test
- public void testResolvePathForRemoteActor(){
- ActorContext actorContext =
- new ActorContext(mock(ActorSystem.class), mock(ActorRef.class),mock(
- ClusterWrapper.class),
- mock(Configuration.class));
-
- String actual = actorContext.resolvePath(
- "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard",
- "akka://system/user/shardmanager/shard/transaction");
-
- String expected = "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard/transaction";
-
- assertEquals(expected, actual);
- }
-
- @Test
- public void testResolvePathForLocalActor(){
- ActorContext actorContext =
- new ActorContext(getSystem(), mock(ActorRef.class), mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- String actual = actorContext.resolvePath(
- "akka://system/user/shardmanager/shard",
- "akka://system/user/shardmanager/shard/transaction");
-
- String expected = "akka://system/user/shardmanager/shard/transaction";
-
- assertEquals(expected, actual);
-
- System.out.println(actorContext
- .actorFor("akka://system/user/shardmanager/shard/transaction"));
- }
-
private static class MockShardManager extends UntypedActor {
}
}
- @Test
- public void testExecuteLocalShardOperationWithShardFound(){
- new JavaTestKit(getSystem()) {{
-
- new Within(duration("1 seconds")) {
- @Override
- protected void run() {
-
- ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
-
- ActorRef shardManagerActorRef = getSystem()
- .actorOf(MockShardManager.props(true, shardActorRef));
-
- ActorContext actorContext =
- new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- Object out = actorContext.executeLocalShardOperation("default", "hello");
-
- assertEquals("hello", out);
-
-
- expectNoMsg();
- }
- };
- }};
-
- }
-
- @Test
- public void testExecuteLocalShardOperationWithShardNotFound(){
- new JavaTestKit(getSystem()) {{
-
- new Within(duration("1 seconds")) {
- @Override
- protected void run() {
-
- ActorRef shardManagerActorRef = getSystem()
- .actorOf(MockShardManager.props(false, null));
-
- ActorContext actorContext =
- new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- Object out = actorContext.executeLocalShardOperation("default", "hello");
-
- assertNull(out);
-
-
- expectNoMsg();
- }
- };
- }};
-
- }
-
-
@Test
public void testFindLocalShardWithShardFound(){
new JavaTestKit(getSystem()) {{
new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
mock(Configuration.class));
- Object out = actorContext.findLocalShard("default");
+ Optional<ActorRef> out = actorContext.findLocalShard("default");
- assertEquals(shardActorRef, out);
+ assertEquals(shardActorRef, out.get());
expectNoMsg();
new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
mock(Configuration.class));
- Object out = actorContext.findLocalShard("default");
-
- assertNull(out);
-
-
+ Optional<ActorRef> out = actorContext.findLocalShard("default");
+ assertTrue(!out.isPresent());
expectNoMsg();
}
};
ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
- Object out = actorContext.executeRemoteOperation(actor, "hello");
+ Object out = actorContext.executeOperation(actor, "hello");
assertEquals("hello", out);
ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
- Future<Object> future = actorContext.executeRemoteOperationAsync(actor, "hello");
+ Future<Object> future = actorContext.executeOperationAsync(actor, "hello");
try {
Object result = Await.result(future, Duration.create(3, TimeUnit.SECONDS));
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
+import com.google.common.base.Optional;
public class MockActorContext extends ActorContext {
super(actorSystem, shardManager, new MockClusterWrapper(), new MockConfiguration());
}
-
- @Override public Object executeShardOperation(String shardName,
- Object message) {
- return executeShardOperationResponse;
- }
-
- @Override public Object executeRemoteOperation(ActorSelection actor,
- Object message) {
+ @Override public Object executeOperation(ActorSelection actor,
+ Object message) {
return executeRemoteOperationResponse;
}
- @Override public ActorSelection findPrimary(String shardName) {
- return null;
+ @Override public Optional<ActorSelection> findPrimaryShard(String shardName) {
+ return Optional.absent();
}
public void setExecuteShardOperationResponse(Object response){
}
@Override
- public Object executeLocalOperation(ActorRef actor,
- Object message) {
+ public Object executeOperation(ActorRef actor,
+ Object message) {
return this.executeLocalOperationResponse;
}
- @Override
- public Object executeLocalShardOperation(String shardName,
- Object message) {
- return this.executeLocalShardOperationResponse;
- }
}
ActorContext testContext = new ActorContext(actorSystem, actorSystem.actorOf(
Props.create(DoNothingActor.class)), new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
Assert.assertNotNull(messages);
Set<QName> childAugmentations = new HashSet<>();
childAugmentations.add(AUG_QNAME);
final YangInstanceIdentifier.AugmentationIdentifier augmentationIdentifier =
- new YangInstanceIdentifier.AugmentationIdentifier(null, childAugmentations);
+ new YangInstanceIdentifier.AugmentationIdentifier(childAugmentations);
final AugmentationNode augmentationNode =
Builders.augmentationBuilder()
.withNodeIdentifier(augmentationIdentifier)
private BackwardsCompatibleMountPointManager compatibleMountPointManager;
static final QName qName = QName.create("namespace", "12-12-1212", "mount");
- static final YangInstanceIdentifier id = YangInstanceIdentifier.builder(qName).build();
+ static final YangInstanceIdentifier id = YangInstanceIdentifier.of(qName);
@Before
public void setUp() throws Exception {
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+
+final class ChainedTransactionCommitImpl implements DOMStoreThreePhaseCommitCohort {
+ private final SnapshotBackedWriteTransaction transaction;
+ private final DOMStoreThreePhaseCommitCohort delegate;
+ private final DOMStoreTransactionChainImpl txChain;
+
+ protected ChainedTransactionCommitImpl(final SnapshotBackedWriteTransaction transaction,
+ final DOMStoreThreePhaseCommitCohort delegate, final DOMStoreTransactionChainImpl txChain) {
+ this.transaction = Preconditions.checkNotNull(transaction);
+ this.delegate = Preconditions.checkNotNull(delegate);
+ this.txChain = Preconditions.checkNotNull(txChain);
+ }
+
+ @Override
+ public ListenableFuture<Boolean> canCommit() {
+ return delegate.canCommit();
+ }
+
+ @Override
+ public ListenableFuture<Void> preCommit() {
+ return delegate.preCommit();
+ }
+
+ @Override
+ public ListenableFuture<Void> abort() {
+ return delegate.abort();
+ }
+
+ @Override
+ public ListenableFuture<Void> commit() {
+ ListenableFuture<Void> commitFuture = delegate.commit();
+ Futures.addCallback(commitFuture, new FutureCallback<Void>() {
+ @Override
+ public void onFailure(final Throwable t) {
+ txChain.onTransactionFailed(transaction, t);
+ }
+
+ @Override
+ public void onSuccess(final Void result) {
+ txChain.onTransactionCommited(transaction);
+ }
+ });
+ return commitFuture;
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Preconditions;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+import org.opendaylight.controller.md.sal.dom.store.impl.SnapshotBackedWriteTransaction.TransactionReadyPrototype;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeModification;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeSnapshot;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+final class DOMStoreTransactionChainImpl extends TransactionReadyPrototype implements DOMStoreTransactionChain {
+ private static abstract class State {
+ /**
+ * Allocate a new snapshot.
+ *
+ * @return A new snapshot
+ */
+ protected abstract DataTreeSnapshot getSnapshot();
+ }
+
+ private static final class Idle extends State {
+ private final InMemoryDOMDataStore store;
+
+ Idle(final InMemoryDOMDataStore store) {
+ this.store = Preconditions.checkNotNull(store);
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ return store.takeSnapshot();
+ }
+ }
+
+ /**
+ * We have a transaction out there.
+ */
+ private static final class Allocated extends State {
+ private static final AtomicReferenceFieldUpdater<Allocated, DataTreeSnapshot> SNAPSHOT_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(Allocated.class, DataTreeSnapshot.class, "snapshot");
+ private final DOMStoreWriteTransaction transaction;
+ private volatile DataTreeSnapshot snapshot;
+
+ Allocated(final DOMStoreWriteTransaction transaction) {
+ this.transaction = Preconditions.checkNotNull(transaction);
+ }
+
+ public DOMStoreWriteTransaction getTransaction() {
+ return transaction;
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ final DataTreeSnapshot ret = snapshot;
+ Preconditions.checkState(ret != null, "Previous transaction %s is not ready yet", transaction.getIdentifier());
+ return ret;
+ }
+
+ void setSnapshot(final DataTreeSnapshot snapshot) {
+ final boolean success = SNAPSHOT_UPDATER.compareAndSet(this, null, snapshot);
+ Preconditions.checkState(success, "Transaction %s has already been marked as ready", transaction.getIdentifier());
+ }
+ }
+
+ /**
+ * Chain is logically shut down, no further allocation allowed.
+ */
+ private static final class Shutdown extends State {
+ private final String message;
+
+ Shutdown(final String message) {
+ this.message = Preconditions.checkNotNull(message);
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ throw new IllegalStateException(message);
+ }
+ }
+
+ private static final AtomicReferenceFieldUpdater<DOMStoreTransactionChainImpl, State> STATE_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(DOMStoreTransactionChainImpl.class, State.class, "state");
+ private static final Logger LOG = LoggerFactory.getLogger(DOMStoreTransactionChainImpl.class);
+ private static final Shutdown CLOSED = new Shutdown("Transaction chain is closed");
+ private static final Shutdown FAILED = new Shutdown("Transaction chain has failed");
+ private final InMemoryDOMDataStore store;
+ private final Idle idleState;
+ private volatile State state;
+
+ DOMStoreTransactionChainImpl(final InMemoryDOMDataStore store) {
+ this.store = Preconditions.checkNotNull(store);
+ idleState = new Idle(store);
+ state = idleState;
+ }
+
+ private Entry<State, DataTreeSnapshot> getSnapshot() {
+ final State localState = state;
+ return new SimpleEntry<>(localState, localState.getSnapshot());
+ }
+
+ private boolean recordTransaction(final State expected, final DOMStoreWriteTransaction transaction) {
+ final State state = new Allocated(transaction);
+ return STATE_UPDATER.compareAndSet(this, expected, state);
+ }
+
+ @Override
+ public DOMStoreReadTransaction newReadOnlyTransaction() {
+ final Entry<State, DataTreeSnapshot> entry = getSnapshot();
+ return new SnapshotBackedReadTransaction(store.nextIdentifier(), store.getDebugTransactions(), entry.getValue());
+ }
+
+ @Override
+ public DOMStoreReadWriteTransaction newReadWriteTransaction() {
+ Entry<State, DataTreeSnapshot> entry;
+ DOMStoreReadWriteTransaction ret;
+
+ do {
+ entry = getSnapshot();
+ ret = new SnapshotBackedReadWriteTransaction(store.nextIdentifier(),
+ store.getDebugTransactions(), entry.getValue(), this);
+ } while (!recordTransaction(entry.getKey(), ret));
+
+ return ret;
+ }
+
+ @Override
+ public DOMStoreWriteTransaction newWriteOnlyTransaction() {
+ Entry<State, DataTreeSnapshot> entry;
+ DOMStoreWriteTransaction ret;
+
+ do {
+ entry = getSnapshot();
+ ret = new SnapshotBackedWriteTransaction(store.nextIdentifier(),
+ store.getDebugTransactions(), entry.getValue(), this);
+ } while (!recordTransaction(entry.getKey(), ret));
+
+ return ret;
+ }
+
+ @Override
+ protected void transactionAborted(final SnapshotBackedWriteTransaction tx) {
+ final State localState = state;
+ if (localState instanceof Allocated) {
+ final Allocated allocated = (Allocated)localState;
+ if (allocated.getTransaction().equals(tx)) {
+ final boolean success = STATE_UPDATER.compareAndSet(this, localState, idleState);
+ if (!success) {
+ LOG.info("State already transitioned from {} to {}", localState, state);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected DOMStoreThreePhaseCommitCohort transactionReady(final SnapshotBackedWriteTransaction tx, final DataTreeModification tree) {
+ final State localState = state;
+
+ if (localState instanceof Allocated) {
+ final Allocated allocated = (Allocated)localState;
+ final DOMStoreWriteTransaction transaction = allocated.getTransaction();
+ Preconditions.checkState(tx.equals(transaction), "Mis-ordered ready transaction %s last allocated was %s", tx, transaction);
+ allocated.setSnapshot(tree);
+ } else {
+ LOG.debug("Ignoring transaction {} readiness due to state {}", tx, localState);
+ }
+
+ return new ChainedTransactionCommitImpl(tx, store.transactionReady(tx, tree), this);
+ }
+
+ @Override
+ public void close() {
+ final State localState = state;
+
+ do {
+ Preconditions.checkState(!CLOSED.equals(localState), "Transaction chain {} has been closed", this);
+
+ if (FAILED.equals(localState)) {
+ LOG.debug("Ignoring user close in failed state");
+ return;
+ }
+ } while (!STATE_UPDATER.compareAndSet(this, localState, CLOSED));
+ }
+
+ void onTransactionFailed(final SnapshotBackedWriteTransaction transaction, final Throwable t) {
+ LOG.debug("Transaction chain {} failed on transaction {}", this, transaction, t);
+ state = FAILED;
+ }
+
+ void onTransactionCommited(final SnapshotBackedWriteTransaction transaction) {
+ // If the committed transaction was the one we allocated last,
+ // we clear it and the ready snapshot, so the next transaction
+ // allocated refers to the data tree directly.
+ final State localState = state;
+
+ if (!(localState instanceof Allocated)) {
+ LOG.debug("Ignoring successful transaction {} in state {}", transaction, localState);
+ return;
+ }
+
+ final Allocated allocated = (Allocated)localState;
+ final DOMStoreWriteTransaction tx = allocated.getTransaction();
+ if (!tx.equals(transaction)) {
+ LOG.debug("Ignoring non-latest successful transaction {} in state {}", transaction, allocated);
+ return;
+ }
+
+ if (!STATE_UPDATER.compareAndSet(this, localState, idleState)) {
+ LOG.debug("Transaction chain {} has already transitioned from {} to {}, not making it idle", this, localState, state);
+ }
+ }
+}
\ No newline at end of file
import static com.google.common.base.Preconditions.checkState;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import javax.annotation.concurrent.GuardedBy;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
@Override
public DOMStoreTransactionChain createTransactionChain() {
- return new DOMStoreTransactionChainImpl();
+ return new DOMStoreTransactionChainImpl(this);
}
@Override
}
}
- boolean getDebugTransactions() {
+ public final boolean getDebugTransactions() {
return debugTransactions;
}
+ final DataTreeSnapshot takeSnapshot() {
+ return dataTree.takeSnapshot();
+ }
+
@Override
public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
final YangInstanceIdentifier path, final L listener, final DataChangeScope scope) {
return new ThreePhaseCommitImpl(tx, tree);
}
- private Object nextIdentifier() {
+ Object nextIdentifier() {
return name + "-" + txCounter.getAndIncrement();
}
- private class DOMStoreTransactionChainImpl extends TransactionReadyPrototype implements DOMStoreTransactionChain {
- @GuardedBy("this")
- private SnapshotBackedWriteTransaction allocatedTransaction;
- @GuardedBy("this")
- private DataTreeSnapshot readySnapshot;
- @GuardedBy("this")
- private boolean chainFailed = false;
-
- @GuardedBy("this")
- private void checkFailed() {
- Preconditions.checkState(!chainFailed, "Transaction chain is failed.");
- }
-
- @GuardedBy("this")
- private DataTreeSnapshot getSnapshot() {
- checkFailed();
-
- if (allocatedTransaction != null) {
- Preconditions.checkState(readySnapshot != null, "Previous transaction %s is not ready yet", allocatedTransaction.getIdentifier());
- return readySnapshot;
- } else {
- return dataTree.takeSnapshot();
- }
- }
-
- @GuardedBy("this")
- private <T extends SnapshotBackedWriteTransaction> T recordTransaction(final T transaction) {
- allocatedTransaction = transaction;
- readySnapshot = null;
- return transaction;
- }
-
- @Override
- public synchronized DOMStoreReadTransaction newReadOnlyTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return new SnapshotBackedReadTransaction(nextIdentifier(), getDebugTransactions(), snapshot);
- }
-
- @Override
- public synchronized DOMStoreReadWriteTransaction newReadWriteTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return recordTransaction(new SnapshotBackedReadWriteTransaction(nextIdentifier(),
- getDebugTransactions(), snapshot, this));
- }
-
- @Override
- public synchronized DOMStoreWriteTransaction newWriteOnlyTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return recordTransaction(new SnapshotBackedWriteTransaction(nextIdentifier(),
- getDebugTransactions(), snapshot, this));
- }
-
- @Override
- protected synchronized void transactionAborted(final SnapshotBackedWriteTransaction tx) {
- if (tx.equals(allocatedTransaction)) {
- Preconditions.checkState(readySnapshot == null, "Unexpected abort of transaction %s with ready snapshot %s", tx, readySnapshot);
- allocatedTransaction = null;
- }
- }
-
- @Override
- protected synchronized DOMStoreThreePhaseCommitCohort transactionReady(final SnapshotBackedWriteTransaction tx, final DataTreeModification tree) {
- Preconditions.checkState(tx.equals(allocatedTransaction), "Mis-ordered ready transaction %s last allocated was %s", tx, allocatedTransaction);
- if (readySnapshot != null) {
- // The snapshot should have been cleared
- LOG.warn("Uncleared snapshot {} encountered, overwritten with transaction {} snapshot {}", readySnapshot, tx, tree);
- }
-
- final DOMStoreThreePhaseCommitCohort cohort = InMemoryDOMDataStore.this.transactionReady(tx, tree);
- readySnapshot = tree;
- return new ChainedTransactionCommitImpl(tx, cohort, this);
- }
-
- @Override
- public void close() {
- // FIXME: this call doesn't look right here - listeningExecutor is shared and owned
- // by the outer class.
- //listeningExecutor.shutdownNow();
- }
-
- protected synchronized void onTransactionFailed(final SnapshotBackedWriteTransaction transaction,
- final Throwable t) {
- chainFailed = true;
- }
-
- public synchronized void onTransactionCommited(final SnapshotBackedWriteTransaction transaction) {
- // If the committed transaction was the one we allocated last,
- // we clear it and the ready snapshot, so the next transaction
- // allocated refers to the data tree directly.
- if (transaction.equals(allocatedTransaction)) {
- if (readySnapshot == null) {
- LOG.warn("Transaction {} committed while no ready snapshot present", transaction);
- }
-
- allocatedTransaction = null;
- readySnapshot = null;
- }
- }
- }
-
- private static class ChainedTransactionCommitImpl implements DOMStoreThreePhaseCommitCohort {
- private final SnapshotBackedWriteTransaction transaction;
- private final DOMStoreThreePhaseCommitCohort delegate;
- private final DOMStoreTransactionChainImpl txChain;
-
- protected ChainedTransactionCommitImpl(final SnapshotBackedWriteTransaction transaction,
- final DOMStoreThreePhaseCommitCohort delegate, final DOMStoreTransactionChainImpl txChain) {
- this.transaction = transaction;
- this.delegate = delegate;
- this.txChain = txChain;
- }
-
- @Override
- public ListenableFuture<Boolean> canCommit() {
- return delegate.canCommit();
- }
-
- @Override
- public ListenableFuture<Void> preCommit() {
- return delegate.preCommit();
- }
-
- @Override
- public ListenableFuture<Void> abort() {
- return delegate.abort();
- }
-
- @Override
- public ListenableFuture<Void> commit() {
- ListenableFuture<Void> commitFuture = delegate.commit();
- Futures.addCallback(commitFuture, new FutureCallback<Void>() {
- @Override
- public void onFailure(final Throwable t) {
- txChain.onTransactionFailed(transaction, t);
- }
-
- @Override
- public void onSuccess(final Void result) {
- txChain.onTransactionCommited(transaction);
- }
- });
- return commitFuture;
- }
- }
-
- private class ThreePhaseCommitImpl implements DOMStoreThreePhaseCommitCohort {
+ private final class ThreePhaseCommitImpl implements DOMStoreThreePhaseCommitCohort {
private final SnapshotBackedWriteTransaction transaction;
private final DataTreeModification modification;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import javax.xml.stream.FactoryConfigurationError;
import org.opendaylight.controller.sal.restconf.impl.InstanceIdentifierContext;
import org.opendaylight.controller.sal.restconf.impl.NormalizedNodeContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
+import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
+import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
WebApplicationException {
InstanceIdentifierContext pathContext = t.getInstanceIdentifierContext();
if (t.getData() == null) {
- throw new RestconfDocumentedException(Response.Status.NOT_FOUND);
+ throw new RestconfDocumentedException(
+ "Request could not be completed because the relevant data model content does not exist.",
+ ErrorType.APPLICATION, ErrorTag.DATA_MISSING);
}
XMLStreamWriter xmlWriter;
private CheckedFuture<Void, TransactionCommitFailedException> deleteDataViaTransaction(
final DOMDataWriteTransaction writeTransaction, final LogicalDatastoreType datastore,
YangInstanceIdentifier path) {
- LOG.info("Delete " + datastore.name() + " via Restconf: {}", path);
+ LOG.trace("Delete " + datastore.name() + " via Restconf: {}", path);
writeTransaction.delete(datastore, path);
return writeTransaction.submit();
}
for (DataSchemaNode childNode : module.getChildNodes()) {
// For every container and list in the module
- processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
- processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
+ if (childNode instanceof ContainerSchemaNode || childNode instanceof ListSchemaNode) {
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
+ }
}
}
property.put(TYPE_KEY, childNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
property.put(ITEMS_KEY, items);
properties.put(childNode.getQName().getLocalName(), property);
+ } else if (childNode instanceof LeafSchemaNode){
+ JSONObject property = processLeafNode((LeafSchemaNode)childNode);
+ properties.put(childNode.getQName().getLocalName(), property);
}
}
return properties;
--- /dev/null
+package org.opendaylight.controller.sal.rest.doc.impl;
+
+import com.google.common.base.Preconditions;
+import org.json.JSONObject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl;
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Map;
+
+
+public class ModelGeneratorTest {
+
+ private DocGenTestHelper helper;
+ private SchemaContext schemaContext;
+
+ @Before
+ public void setUp() throws Exception {
+ helper = new DocGenTestHelper();
+ helper.setUp();
+ schemaContext = new YangParserImpl().resolveSchemaContext(new HashSet<Module>(helper.getModules().values()));
+ }
+
+ @Test
+ public void testConvertToJsonSchema() throws Exception {
+
+ Preconditions.checkArgument(helper.getModules() != null, "No modules found");
+
+ ModelGenerator generator = new ModelGenerator();
+
+ for (Map.Entry<File, Module> m : helper.getModules().entrySet()) {
+ if (m.getKey().getAbsolutePath().endsWith("opflex.yang")) {
+
+ JSONObject jsonObject = generator.convertToJsonSchema(m.getValue(), schemaContext);
+ Assert.assertNotNull(jsonObject);
+ }
+ }
+
+ }
+}
\ No newline at end of file
--- /dev/null
+module opflex {
+ yang-version 1;
+
+ namespace "urn:opendaylight:groupbasedpolicy:opflex";
+ prefix "opflex";
+
+
+
+
+
+ description
+ "This module defines the group-based policy OpFlex renderer model.";
+
+ revision "2014-05-28" {
+ description
+ "Initial revision.";
+ }
+
+ typedef serialization {
+ description
+ "The serialization to use for OpFlex messages.";
+
+ type enumeration {
+ enum json {
+ description
+ "JSON 1.0 serialization.";
+ }
+ enum xml {
+ description
+ "XML serialization.";
+ }
+ enum binary {
+ description
+ "OpFlex binary serialization.";
+ }
+ }
+ }
+
+ // ******************
+ // Configuration Data
+ // ******************
+ leaf domain {
+ description
+ "The OpFlex administrative domain.";
+
+ config true;
+
+ type string;
+ }
+}
\ No newline at end of file
"Toaster module in progress.";
}
+ leaf domain {
+ description
+ "Toaster domain.";
+
+ config true;
+
+ type string;
+ }
identity toast-type {
description
"Base for all bread types supported by the toaster.
- New bread types not listed here nay be added in the
+ New bread types not listed here nay be added in the
future.";
}
"Indicates the toaster service is available";
description
"Top-level container for all toaster database objects.";
-
+
leaf testToasterBits {
type bits {
bit testbit1 {
}
default "testbit2";
}
-
+
leaf testUnion {
type union {
type int32;
type string;
}
-
- }
-
+
+ }
+
leaf-list allow-user {
type string;
description "A list of user name patterns to allow";
-
+
}
-
+
choice how {
default interval;
case interval {
type string;
}
}
- }
-
+ }
+
leaf toasterManufacturer {
type DisplayString;
config false;
mandatory true;
description
- "The name of the toaster's manufacturer. For instance,
+ "The name of the toaster's manufacturer. For instance,
Microsoft Toaster.";
}
config false;
mandatory true;
description
- "This variable indicates the current state of
+ "This variable indicates the current state of
the toaster.";
}
}
rpc make-toast {
description
"Make some toast.
- The toastDone notification will be sent when
+ The toastDone notification will be sent when
the toast is finished.
An 'in-use' error will be returned if toast
is already being made.
- A 'resource-denied' error will be returned
+ A 'resource-denied' error will be returned
if the toaster service is disabled.";
input {
leaf toasterDoneness {
}
default '5';
description
- "This variable controls how well-done is the
+ "This variable controls how well-done is the
ensuing toast. It should be on a scale of 1 to 10.
- Toast made at 10 generally is considered unfit
- for human consumption; toast made at 1 is warmed
+ Toast made at 10 generally is considered unfit
+ for human consumption; toast made at 1 is warmed
lightly.";
}
}
default 'wheat-bread';
description
- "This variable informs the toaster of the type of
- material that is being toasted. The toaster
- uses this information, combined with
- toasterDoneness, to compute for how
- long the material must be toasted to achieve
+ "This variable informs the toaster of the type of
+ material that is being toasted. The toaster
+ uses this information, combined with
+ toasterDoneness, to compute for how
+ long the material must be toasted to achieve
the required doneness.";
}
}
- }
+ }
rpc cancel-toast {
description
"Stop making toast, if any is being made.
- A 'resource-denied' error will be returned
+ A 'resource-denied' error will be returned
if the toaster service is disabled.";
- }
-
+ }
+
notification toastDone {
description
"Indicates that the toast in progress has completed.";
description
"Indicates the final toast status";
}
- }
- }
+ }
+ }
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import io.netty.channel.Channel;
+import java.util.List;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.impl.NetconfServerSessionListener;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfMonitoringServiceImpl;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
+import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringServiceImplTest {
+
+ private NetconfMonitoringServiceImpl service;
+
+ @Mock
+ private NetconfOperationProvider operationProvider;
+ @Mock
+ private NetconfManagementSession managementSession;
+ @Mock
+ private NetconfOperationServiceSnapshot snapshot;
+ @Mock
+ private NetconfOperationService operationService;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ service = new NetconfMonitoringServiceImpl(operationProvider);
+ }
+
+ @Test
+ public void testSessions() throws Exception {
+ doReturn("sessToStr").when(managementSession).toString();
+ service.onSessionUp(managementSession);
+ List list = Lists.newArrayList(managementSession);
+ }
+
+ @Test(expected = RuntimeException.class)
+ public void testGetSchemas() throws Exception {
+ doThrow(RuntimeException.class).when(operationProvider).openSnapshot(anyString());
+ service.getSchemas();
+ }
+
+ @Test(expected = IllegalStateException.class)
+ public void testGetSchemas2() throws Exception {
+ doThrow(Exception.class).when(operationProvider).openSnapshot(anyString());
+ service.getSchemas();
+ }
+
+ @Test
+ public void testGetSchemas3() throws Exception {
+ doReturn("").when(managementSession).toString();
+ Capability cap = mock(Capability.class);
+ Set caps = Sets.newHashSet(cap);
+ Set services = Sets.newHashSet(operationService);
+ doReturn(snapshot).when(operationProvider).openSnapshot(anyString());
+ doReturn(services).when(snapshot).getServices();
+ doReturn(caps).when(operationService).getCapabilities();
+ Optional opt = mock(Optional.class);
+ doReturn(opt).when(cap).getCapabilitySchema();
+ doReturn(true).when(opt).isPresent();
+ doReturn(opt).when(cap).getModuleNamespace();
+ doReturn("namespace").when(opt).get();
+ Optional optRev = Optional.of("rev");
+ doReturn(optRev).when(cap).getRevision();
+ doReturn(Optional.of("modName")).when(cap).getModuleName();
+ doReturn(Optional.of(Lists.newArrayList("loc"))).when(cap).getLocation();
+ doNothing().when(snapshot).close();
+
+ assertNotNull(service.getSchemas());
+ verify(snapshot, times(1)).close();
+
+ NetconfServerSessionListener sessionListener = mock(NetconfServerSessionListener.class);
+ Channel channel = mock(Channel.class);
+ NetconfHelloMessageAdditionalHeader header = new NetconfHelloMessageAdditionalHeader("name", "addr", "2", "tcp", "id");
+ NetconfServerSession sm = new NetconfServerSession(sessionListener, channel, 10, header);
+ doNothing().when(sessionListener).onSessionUp(any(NetconfServerSession.class));
+ sm.sessionUp();
+ service.onSessionUp(sm);
+ assertEquals(1, service.getSessions().getSession().size());
+
+ assertEquals(Long.valueOf(10), service.getSessions().getSession().get(0).getSessionId());
+
+ service.onSessionDown(sm);
+ assertEquals(0, service.getSessions().getSession().size());
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
+public class DefaultCloseSessionTest {
+ @Test
+ public void testDefaultCloseSession() throws Exception {
+ AutoCloseable res = mock(AutoCloseable.class);
+ doNothing().when(res).close();
+ DefaultCloseSession session = new DefaultCloseSession("", res);
+ Document doc = XmlUtil.newDocument();
+ XmlElement elem = XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"));
+ session.handleWithNoSubsequentOperations(doc, elem);
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testDefaultCloseSession2() throws Exception {
+ AutoCloseable res = mock(AutoCloseable.class);
+ doThrow(NetconfDocumentedException.class).when(res).close();
+ DefaultCloseSession session = new DefaultCloseSession("", res);
+ Document doc = XmlUtil.newDocument();
+ XmlElement elem = XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"));
+ session.handleWithNoSubsequentOperations(doc, elem);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import com.google.common.collect.Sets;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
+import org.opendaylight.controller.netconf.util.test.XmlFileLoader;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import static org.mockito.Mockito.*;
+
+public class DefaultCommitTest {
+
+ private NetconfOperationChainedExecution operation;
+ private Document requestMessage;
+ private NetconfOperationRouter router;
+ private DefaultCommitNotificationProducer notifier;
+ private CapabilityProvider cap;
+ private DefaultCommit commit;
+
+ @Before
+ public void setUp() throws Exception {
+ operation = mock(NetconfOperationChainedExecution.class);
+ doReturn(XmlUtil.newDocument()).when(operation).execute(any(Document.class));
+ router = mock(NetconfOperationRouter.class);
+ doReturn(false).when(operation).isExecutionTermination();
+ notifier = mock(DefaultCommitNotificationProducer.class);
+ doNothing().when(notifier).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ cap = mock(CapabilityProvider.class);
+ doReturn(Sets.newHashSet()).when(cap).getCapabilities();
+ Document rpcData = XmlFileLoader.xmlFileToDocument("netconfMessages/editConfig_expectedResult.xml");
+ doReturn(rpcData).when(router).onNetconfMessage(any(Document.class), any(NetconfServerSession.class));
+ commit = new DefaultCommit(notifier, cap, "", router);
+ }
+
+ @Test
+ public void testHandleWithNotification() throws Exception {
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ commit.handle(requestMessage, operation);
+ verify(operation, times(1)).execute(requestMessage);
+ verify(notifier, times(1)).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ }
+
+ @Test
+ public void testHandleWithoutNotification() throws Exception {
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ Element elem = requestMessage.getDocumentElement();
+ elem.setAttribute("notify", "false");
+ commit.handle(requestMessage, operation);
+ verify(operation, times(1)).execute(requestMessage);
+ verify(notifier, never()).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testHandle() throws Exception {
+ Document rpcData = XmlFileLoader.xmlFileToDocument("netconfMessages/get.xml");
+ doReturn(rpcData).when(router).onNetconfMessage(any(Document.class), any(NetconfServerSession.class));
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ commit.handle(requestMessage, operation);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import com.google.common.base.Optional;
+import junit.framework.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
+public class DefaultGetSchemaTest {
+
+ private CapabilityProvider cap;
+ private Document doc;
+ private String getSchema;
+
+ @Before
+ public void setUp() throws Exception {
+ cap = mock(CapabilityProvider.class);
+ doc = XmlUtil.newDocument();
+ getSchema = "<get-schema xmlns=\"urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring\">\n" +
+ " <identifier>threadpool-api</identifier>\n" +
+ " <version>2010-09-24</version>\n" +
+ " <format\n" +
+ " xmlns:ncm=\"urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring\">ncm:yang\n" +
+ " </format>\n" +
+ " </get-schema>";
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testDefaultGetSchema() throws Exception {
+ DefaultGetSchema schema = new DefaultGetSchema(cap, "");
+ doThrow(IllegalStateException.class).when(cap).getSchemaForCapability(anyString(), any(Optional.class));
+ schema.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement(getSchema)));
+ }
+
+ @Test
+ public void handleWithNoSubsequentOperations() throws Exception {
+ DefaultGetSchema schema = new DefaultGetSchema(cap, "");
+ doReturn("").when(cap).getSchemaForCapability(anyString(), any(Optional.class));
+ assertNotNull(schema.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement(getSchema))));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelPipeline;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.*;
+
+public class DefaultStopExiTest {
+ @Test
+ public void testHandleWithNoSubsequentOperations() throws Exception {
+ DefaultStopExi exi = new DefaultStopExi("");
+ Document doc = XmlUtil.newDocument();
+ Channel channel = mock(Channel.class);
+ ChannelPipeline pipeline = mock(ChannelPipeline.class);
+ doReturn(pipeline).when(channel).pipeline();
+ ChannelHandler channelHandler = mock(ChannelHandler.class);
+ doReturn(channelHandler).when(pipeline).replace(anyString(), anyString(), any(ChannelHandler.class));
+
+ NetconfServerSession serverSession = new NetconfServerSession(null, channel, 2L, null);
+ exi.setNetconfSession(serverSession);
+
+ assertNotNull(exi.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"))));
+ verify(pipeline, times(1)).replace(anyString(), anyString(), any(ChannelHandler.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.osgi;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Dictionary;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.osgi.framework.*;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
+public class NetconfImplActivatorTest {
+
+ @Mock
+ private BundleContext bundle;
+ @Mock
+ private Filter filter;
+ @Mock
+ private ServiceReference reference;
+ @Mock
+ private ServiceRegistration registration;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(filter).when(bundle).createFilter(anyString());
+ doNothing().when(bundle).addServiceListener(any(ServiceListener.class), anyString());
+
+ ServiceReference[] refs = new ServiceReference[0];
+ doReturn(refs).when(bundle).getServiceReferences(anyString(), anyString());
+ doReturn(Arrays.asList(refs)).when(bundle).getServiceReferences(any(Class.class), anyString());
+ doReturn("").when(bundle).getProperty(anyString());
+ doReturn(registration).when(bundle).registerService(any(Class.class), any(NetconfOperationServiceFactoryListenerImpl.class), any(Dictionary.class));
+ doNothing().when(registration).unregister();
+ doNothing().when(bundle).removeServiceListener(any(ServiceListener.class));
+ }
+
+ @Test
+ public void testStart() throws Exception {
+ NetconfImplActivator activator = new NetconfImplActivator();
+ activator.start(bundle);
+ verify(bundle, times(2)).registerService(any(Class.class), any(NetconfOperationServiceFactoryListenerImpl.class), any(Dictionary.class));
+ activator.stop(bundle);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.osgi;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceReference;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
+public class NetconfOperationServiceFactoryTrackerTest {
+
+ @Mock
+ private Filter filter;
+ @Mock
+ private BundleContext context;
+ @Mock
+ private NetconfOperationServiceFactoryListener listener;
+ @Mock
+ private NetconfOperationServiceFactory factory;
+ @Mock
+ private ServiceReference reference;
+
+ private NetconfOperationServiceFactoryTracker tracker;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doNothing().when(listener).onRemoveNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ doReturn(filter).when(context).createFilter(anyString());
+ doReturn("").when(reference).toString();
+ doReturn(factory).when(context).getService(any(ServiceReference.class));
+ doReturn("").when(factory).toString();
+ doNothing().when(listener).onAddNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ tracker = new NetconfOperationServiceFactoryTracker(context, listener);
+ }
+
+ @Test
+ public void testNetconfOperationServiceFactoryTracker() throws Exception {
+ tracker.removedService(null, factory);
+ verify(listener, times(1)).onRemoveNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ }
+
+ @Test
+ public void testAddingService() throws Exception {
+ assertNotNull(tracker.addingService(reference));
+ verify(listener, times(1)).onAddNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.util;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.util.concurrent.GenericFutureListener;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.mockito.Mockito.*;
+
+public class DeserializerExceptionHandlerTest {
+
+ private DeserializerExceptionHandler handler;
+ private ChannelFuture channelFuture;
+ private ChannelHandlerContext context;
+ private Channel channel;
+
+ @Before
+ public void setUp() throws Exception {
+ handler = new DeserializerExceptionHandler();
+ context = mock(ChannelHandlerContext.class);
+ channel = mock(Channel.class);
+ doReturn(channel).when(context).channel();
+ channelFuture = mock(ChannelFuture.class);
+ doReturn(channelFuture).when(channelFuture).addListener(any(GenericFutureListener.class));
+ doReturn(channelFuture).when(channel).writeAndFlush(anyObject());
+ }
+
+ @Test
+ public void testExceptionCaught() throws Exception {
+ handler.exceptionCaught(context, new Exception());
+ verify(context, times(1)).channel();
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import java.util.Arrays;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceListener;
+import org.osgi.framework.ServiceReference;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringActivatorTest {
+
+ @Mock
+ BundleContext context;
+ @Mock
+ Filter filter;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(filter).when(context).createFilter(anyString());
+ doNothing().when(context).addServiceListener(any(ServiceListener.class), anyString());
+ ServiceReference[] refs = new ServiceReference[2];
+ doReturn(Arrays.asList(refs)).when(context).getServiceReferences(any(Class.class), anyString());
+ doReturn(refs).when(context).getServiceReferences(anyString(), anyString());
+ }
+
+ @Test
+ public void testNetconfMonitoringActivator() throws Exception {
+ NetconfMonitoringActivator activator = new NetconfMonitoringActivator();
+ activator.start(context);
+ verify(context, times(1)).addServiceListener(any(ServiceListener.class), anyString());
+
+ activator.stop(context);
+ verify(context, times(1)).removeServiceListener(any(ServiceListener.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import com.google.common.base.Optional;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+public class NetconfMonitoringOperationServiceTest {
+ @Test
+ public void testGetters() throws Exception {
+ NetconfMonitoringService monitor = mock(NetconfMonitoringService.class);
+ NetconfMonitoringOperationService service = new NetconfMonitoringOperationService(monitor);
+
+ assertEquals(1, service.getNetconfOperations().size());
+
+ assertEquals(Optional.absent(), service.getCapabilities().iterator().next().getCapabilitySchema());
+ assertEquals(Optional.absent(), service.getCapabilities().iterator().next().getLocation());
+ assertEquals(Optional.of(MonitoringConstants.MODULE_REVISION), service.getCapabilities().iterator().next().getRevision());
+ assertEquals(Optional.of(MonitoringConstants.MODULE_NAME), service.getCapabilities().iterator().next().getModuleName());
+ assertEquals(Optional.of(MonitoringConstants.NAMESPACE), service.getCapabilities().iterator().next().getModuleNamespace());
+ assertEquals(MonitoringConstants.URI, service.getCapabilities().iterator().next().getCapabilityUri());
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import java.util.Hashtable;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceReference;
+import org.osgi.framework.ServiceRegistration;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyCollection;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringServiceTrackerTest {
+
+ @Mock
+ private ServiceReference reference;
+ @Mock
+ private BundleContext context;
+ @Mock
+ private ServiceRegistration serviceRegistration;
+ @Mock
+ private Filter filter;
+ @Mock
+ private NetconfMonitoringService monitoringService;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(serviceRegistration).when(context).registerService(any(Class.class), any(NetconfOperationServiceFactory.class), any(Hashtable.class));
+ doNothing().when(serviceRegistration).unregister();
+ doReturn(filter).when(context).createFilter(anyString());
+ doReturn("").when(reference).toString();
+ doReturn(monitoringService).when(context).getService(any(ServiceReference.class));
+ }
+
+ @Test
+ public void testAddingService() throws Exception {
+ NetconfMonitoringServiceTracker tracker = new NetconfMonitoringServiceTracker(context);
+ tracker.addingService(reference);
+ verify(context, times(1)).registerService(any(Class.class), any(NetconfOperationServiceFactory.class), any(Hashtable.class));
+ tracker.removedService(reference, null);
+ verify(serviceRegistration, times(1)).unregister();
+ }
+}
<artifactId>logback-classic</artifactId>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcpkix-jdk15on</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-netty-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>netconf-auth</artifactId>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.logback_settings</artifactId>
<groupId>xmlunit</groupId>
<artifactId>xmlunit</artifactId>
</dependency>
-
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>config-util</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>ietf-netconf-monitoring</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-client</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-yang-types-20130715</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-inet-types</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-impl</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-monitoring</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-ssh</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netty-config-api</artifactId>
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import ch.qos.logback.classic.Level;
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.io.ByteStreams;
+import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import java.io.File;
+import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.List;
-
import java.util.concurrent.TimeUnit;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.annotation.Arg;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
-
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Charsets;
-import com.google.common.io.CharStreams;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
public final class Main {
- // TODO add logback config
-
- // TODO make exi configurable
-
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
static class Params {
@Arg(dest = "generate-config-address")
public String generateConfigsAddress;
- @Arg(dest = "generate-configs-dir")
- public File generateConfigsDir;
+ @Arg(dest = "distro-folder")
+ public File distroFolder;
@Arg(dest = "generate-configs-batch-size")
public int generateConfigBatchSize;
@Arg(dest = "exi")
public boolean exi;
+ @Arg(dest = "debug")
+ public boolean debug;
+
static ArgumentParser getParser() {
final ArgumentParser parser = ArgumentParsers.newArgumentParser("netconf testool");
- parser.addArgument("--devices-count")
+
+ parser.description("Netconf device simulator. Detailed info can be found at https://wiki.opendaylight.org/view/OpenDaylight_Controller:Netconf:Testtool#Building_testtool");
+
+ parser.addArgument("--device-count")
.type(Integer.class)
.setDefault(1)
.type(Integer.class)
parser.addArgument("--schemas-dir")
.type(File.class)
- .required(true)
- .help("Directory containing yang schemas to describe simulated devices")
+ .help("Directory containing yang schemas to describe simulated devices. Some schemas e.g. netconf monitoring and inet types are included by default")
.dest("schemas-dir");
parser.addArgument("--starting-port")
parser.addArgument("--generate-config-connection-timeout")
.type(Integer.class)
- .setDefault((int)TimeUnit.MINUTES.toMillis(5))
+ .setDefault((int)TimeUnit.MINUTES.toMillis(30))
.help("Timeout to be generated in initial config files")
.dest("generate-config-connection-timeout");
parser.addArgument("--generate-configs-batch-size")
.type(Integer.class)
- .setDefault(100)
+ .setDefault(4000)
.help("Number of connector configs per generated file")
.dest("generate-configs-batch-size");
- parser.addArgument("--generate-configs-dir")
+ parser.addArgument("--distribution-folder")
.type(File.class)
- .help("Directory where initial config files for ODL distribution should be generated")
- .dest("generate-configs-dir");
+ .help("Directory where the karaf distribution for controller is located")
+ .dest("distro-folder");
parser.addArgument("--ssh")
.type(Boolean.class)
parser.addArgument("--exi")
.type(Boolean.class)
- .setDefault(false)
+ .setDefault(true)
.help("Whether to use exi to transport xml content")
.dest("exi");
+ parser.addArgument("--debug")
+ .type(Boolean.class)
+ .setDefault(false)
+ .help("Whether to use debug log level instead of INFO")
+ .dest("debug");
+
return parser;
}
checkArgument(deviceCount > 0, "Device count has to be > 0");
checkArgument(startingPort > 1024, "Starting port has to be > 1024");
- checkArgument(schemasDir.exists(), "Schemas dir has to exist");
- checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
- checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ if(schemasDir != null) {
+ checkArgument(schemasDir.exists(), "Schemas dir has to exist");
+ checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
+ checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ }
}
}
public static void main(final String[] args) {
- ch.ethz.ssh2.log.Logger.enabled = true;
-
final Params params = parseArgs(args, Params.getParser());
params.validate();
+ final ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
+ root.setLevel(params.debug ? Level.DEBUG : Level.INFO);
+
final NetconfDeviceSimulator netconfDeviceSimulator = new NetconfDeviceSimulator();
try {
final List<Integer> openDevices = netconfDeviceSimulator.start(params);
- if(params.generateConfigsDir != null) {
- new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ if(params.distroFolder != null) {
+ final ConfigGenerator configGenerator = new ConfigGenerator(params.distroFolder, openDevices);
+ final List<File> generated = configGenerator.generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ configGenerator.updateFeatureFile(generated);
+ configGenerator.changeLoadOrder();
}
} catch (final Exception e) {
LOG.error("Unhandled exception", e);
}
}
-
private static Params parseArgs(final String[] args, final ArgumentParser parser) {
final Params opt = new Params();
try {
public static final String NETCONF_USE_SSH = "false";
public static final String SIM_DEVICE_SUFFIX = "-sim-device";
- private final File directory;
+ private static final String SIM_DEVICE_CFG_PREFIX = "simulated-devices_";
+ private static final String ETC_KARAF_PATH = "etc/";
+ private static final String ETC_OPENDAYLIGHT_KARAF_PATH = ETC_KARAF_PATH + "opendaylight/karaf/";
+
+ public static final String NETCONF_CONNECTOR_ALL_FEATURE = "odl-netconf-connector-all";
+ private static final String ORG_OPS4J_PAX_URL_MVN_CFG = "org.ops4j.pax.url.mvn.cfg";
+
+ private final File configDir;
private final List<Integer> openDevices;
+ private final File ncFeatureFile;
+ private final File etcDir;
+ private final File loadOrderCfgFile;
public ConfigGenerator(final File directory, final List<Integer> openDevices) {
- this.directory = directory;
+ this.configDir = new File(directory, ETC_OPENDAYLIGHT_KARAF_PATH);
+ this.etcDir = new File(directory, ETC_KARAF_PATH);
+ this.loadOrderCfgFile = new File(etcDir, ORG_OPS4J_PAX_URL_MVN_CFG);
+ this.ncFeatureFile = getFeatureFile(directory, "features-netconf-connector");
this.openDevices = openDevices;
}
- public void generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
- if(directory.exists() == false) {
- checkState(directory.mkdirs(), "Unable to create folder %s" + directory);
+ public List<File> generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
+ if(configDir.exists() == false) {
+ Preconditions.checkState(configDir.mkdirs(), "Unable to create directory " + configDir);
+ }
+
+ for (final File file : configDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return !pathname.isDirectory() && pathname.getName().startsWith(SIM_DEVICE_CFG_PREFIX);
+ }
+ })) {
+ Preconditions.checkState(file.delete(), "Unable to clean previous generated file %s", file);
}
try(InputStream stream = Main.class.getResourceAsStream(NETCONF_CONNECTOR_XML)) {
checkNotNull(stream, "Cannot load %s", NETCONF_CONNECTOR_XML);
String configBlueprint = CharStreams.toString(new InputStreamReader(stream, Charsets.UTF_8));
- // TODO make address configurable
checkState(configBlueprint.contains(NETCONF_CONNECTOR_NAME));
checkState(configBlueprint.contains(NETCONF_CONNECTOR_PORT));
checkState(configBlueprint.contains(NETCONF_USE_SSH));
StringBuilder b = new StringBuilder();
b.append(before);
+ final List<File> generatedConfigs = Lists.newArrayList();
+
for (final Integer openDevice : openDevices) {
if(batchStart == null) {
batchStart = openDevice;
connectorCount++;
if(connectorCount == batchSize) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevice)), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevice));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
connectorCount = 0;
b = new StringBuilder();
b.append(before);
// Write remaining
if(connectorCount != 0) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1))), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1)));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
}
- LOG.info("Config files generated in {}", directory);
+ LOG.info("Config files generated in {}", configDir);
+ return generatedConfigs;
} catch (final IOException e) {
throw new RuntimeException("Unable to generate config files", e);
}
}
+
+
+ public void updateFeatureFile(final List<File> generated) {
+ // TODO karaf core contains jaxb for feature files, use that for modification
+ try {
+ final Document document = XmlUtil.readXmlToDocument(Files.toString(ncFeatureFile, Charsets.UTF_8));
+ final NodeList childNodes = document.getDocumentElement().getChildNodes();
+
+ for (int i = 0; i < childNodes.getLength(); i++) {
+ final Node item = childNodes.item(i);
+ if(item instanceof Element == false) {
+ continue;
+ }
+ if(item.getLocalName().equals("feature") ==false) {
+ continue;
+ }
+
+ if(NETCONF_CONNECTOR_ALL_FEATURE.equals(((Element) item).getAttribute("name"))) {
+ final Element ncAllFeatureDefinition = (Element) item;
+ // Clean previous generated files
+ for (final XmlElement configfile : XmlElement.fromDomElement(ncAllFeatureDefinition).getChildElements("configfile")) {
+ ncAllFeatureDefinition.removeChild(configfile.getDomElement());
+ }
+ for (final File file : generated) {
+ final Element configfile = document.createElement("configfile");
+ configfile.setTextContent("file:" + ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+ configfile.setAttribute("finalname", ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+ ncAllFeatureDefinition.appendChild(configfile);
+ }
+ }
+ }
+
+ Files.write(XmlUtil.toString(document), ncFeatureFile, Charsets.UTF_8);
+ LOG.info("Feature file {} updated", ncFeatureFile);
+ } catch (final IOException e) {
+ throw new RuntimeException("Unable to load features file as a resource");
+ } catch (final SAXException e) {
+ throw new RuntimeException("Unable to parse features file");
+ }
+ }
+
+
+ private static File getFeatureFile(final File distroFolder, final String featureName) {
+ checkExistingDir(distroFolder, String.format("Folder %s does not exist", distroFolder));
+
+ final File systemDir = checkExistingDir(new File(distroFolder, "system"), String.format("Folder %s does not contain a karaf distro, folder system is missing", distroFolder));
+ final File netconfConnectorFeaturesParentDir = checkExistingDir(new File(systemDir, "org/opendaylight/controller/" + featureName), String.format("Karaf distro in %s does not contain netconf-connector features", distroFolder));
+
+ // Find newest version for features
+ final File newestVersionDir = Collections.max(
+ Lists.newArrayList(netconfConnectorFeaturesParentDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.isDirectory();
+ }
+ })), new Comparator<File>() {
+ @Override
+ public int compare(final File o1, final File o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+ });
+
+ return newestVersionDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.getName().contains(featureName);
+ }
+ })[0];
+ }
+
+ private static File checkExistingDir(final File folder, final String msg) {
+ Preconditions.checkArgument(folder.exists(), msg);
+ Preconditions.checkArgument(folder.isDirectory(), msg);
+ return folder;
+ }
+
+ public void changeLoadOrder() {
+ try {
+ Files.write(ByteStreams.toByteArray(getClass().getResourceAsStream("/" +ORG_OPS4J_PAX_URL_MVN_CFG)), loadOrderCfgFile);
+ LOG.info("Load order changed to prefer local bundles/features by rewriting file {}", loadOrderCfgFile);
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to rewrite features file " + loadOrderCfgFile, e);
+ }
+ }
}
}
import com.google.common.base.Charsets;
import com.google.common.base.Function;
+import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.CharStreams;
import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.Futures;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.local.LocalAddress;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.net.Inet4Address;
import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceListener;
+import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceProvider;
import org.opendaylight.yangtools.yang.model.repo.util.FilesystemSchemaSourceCache;
import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
private static final Logger LOG = LoggerFactory.getLogger(NetconfDeviceSimulator.class);
- public static final int CONNECTION_TIMEOUT_MILLIS = 20000;
-
private final NioEventLoopGroup nettyThreadgroup;
private final HashedWheelTimer hashedWheelTimer;
private final List<Channel> devicesChannels = Lists.newArrayList();
}
private Map<ModuleBuilder, String> toModuleBuilders(final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> sources) {
- final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
- @Override
- public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
- return input.getKey().getAST();
- }
- });
- final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
- asts.values(), Optional.<SchemaContext>absent());
+ final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
+ @Override
+ public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
+ return input.getKey().getAST();
+ }
+ });
+ final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
+ asts.values(), Optional.<SchemaContext>absent());
- final ParseTreeWalker walker = new ParseTreeWalker();
- final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
+ final ParseTreeWalker walker = new ParseTreeWalker();
+ final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
- for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
- final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
- walker, entry.getValue()).getModuleBuilder();
+ for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
+ final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
+ walker, entry.getValue()).getModuleBuilder();
- try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
- sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
+ try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
+ sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
}
-
- return sourceToBuilder;
}
+ return sourceToBuilder;
+ }
+
public List<Integer> start(final Main.Params params) {
+ LOG.info("Starting {}, {} simulated devices starting on port {}", params.deviceCount, params.ssh ? "SSH" : "TCP", params.startingPort);
+
final Map<ModuleBuilder, String> moduleBuilders = parseSchemasToModuleBuilders(params);
final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders, params.exi, params.generateConfigsTimeout);
devicesChannels.add(server.channel());
openDevices.add(currentPort - 1);
-
}
if(openDevices.size() == params.deviceCount) {
- LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort);
+ LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort - 1);
} else {
LOG.warn("Not all simulated devices started successfully. Started devices ar on ports {}", openDevices);
}
public void schemaSourceUnregistered(final PotentialSchemaSource<?> potentialSchemaSource) {}
});
- final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
- consumer.registerSchemaSourceListener(cache);
+ if(params.schemasDir != null) {
+ final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
+ consumer.registerSchemaSourceListener(cache);
+ }
+
+ addDefaultSchemas(consumer);
final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> asts = Maps.newHashMap();
for (final SourceIdentifier loadedSource : loadedSources) {
return toModuleBuilders(asts);
}
+ private void addDefaultSchemas(final SharedSchemaRepository consumer) {
+ SourceIdentifier sId = new SourceIdentifier("ietf-netconf-monitoring", "2010-10-04");
+ registerSource(consumer, "/META-INF/yang/ietf-netconf-monitoring.yang", sId);
+
+ sId = new SourceIdentifier("ietf-yang-types", "2013-07-15");
+ registerSource(consumer, "/META-INF/yang/ietf-yang-types@2013-07-15.yang", sId);
+
+ sId = new SourceIdentifier("ietf-inet-types", "2010-09-24");
+ registerSource(consumer, "/META-INF/yang/ietf-inet-types.yang", sId);
+ }
+
+ private void registerSource(final SharedSchemaRepository consumer, final String resource, final SourceIdentifier sourceId) {
+ consumer.registerSchemaSource(new SchemaSourceProvider<SchemaSourceRepresentation>() {
+ @Override
+ public CheckedFuture<? extends SchemaSourceRepresentation, SchemaSourceException> getSource(final SourceIdentifier sourceIdentifier) {
+ return Futures.immediateCheckedFuture(new YangTextSchemaSource(sourceId) {
+ @Override
+ protected Objects.ToStringHelper addToStringAttributes(final Objects.ToStringHelper toStringHelper) {
+ return toStringHelper;
+ }
+
+ @Override
+ public InputStream openStream() throws IOException {
+ return getClass().getResourceAsStream(resource);
+ }
+ });
+ }
+ }, PotentialSchemaSource.create(sourceId, YangTextSchemaSource.class, PotentialSchemaSource.Costs.IMMEDIATE.getValue()));
+ }
+
private static InetSocketAddress getAddress(final int port) {
try {
// TODO make address configurable
--- /dev/null
+################################################################################
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+#
+# If set to true, the following property will not allow any certificate to be used
+# when accessing Maven repositories through SSL
+#
+#org.ops4j.pax.url.mvn.certificateCheck=
+
+#
+# Path to the local Maven settings file.
+# The repositories defined in this file will be automatically added to the list
+# of default repositories if the 'org.ops4j.pax.url.mvn.repositories' property
+# below is not set.
+# The following locations are checked for the existence of the settings.xml file
+# * 1. looks for the specified url
+# * 2. if not found looks for ${user.home}/.m2/settings.xml
+# * 3. if not found looks for ${maven.home}/conf/settings.xml
+# * 4. if not found looks for ${M2_HOME}/conf/settings.xml
+#
+#org.ops4j.pax.url.mvn.settings=
+
+#
+# Path to the local Maven repository which is used to avoid downloading
+# artifacts when they already exist locally.
+# The value of this property will be extracted from the settings.xml file
+# above, or defaulted to:
+# System.getProperty( "user.home" ) + "/.m2/repository"
+#
+org.ops4j.pax.url.mvn.localRepository=${karaf.home}/${karaf.default.repository}
+
+#
+# Default this to false. It's just weird to use undocumented repos
+#
+org.ops4j.pax.url.mvn.useFallbackRepositories=false
+
+#
+# Uncomment if you don't wanna use the proxy settings
+# from the Maven conf/settings.xml file
+#
+# org.ops4j.pax.url.mvn.proxySupport=false
+
+#
+# Disable aether support by default. This ensure that the defaultRepositories
+# below will be used
+#
+#org.ops4j.pax.url.mvn.disableAether=true
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# Those repositories will be checked before iterating through the
+# below list of repositories and even before the local repository
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snaphots
+# @noreleases : the repository does not contain any released artifacts
+#
+# The following property value will add the system folder as a repo.
+#
+#org.ops4j.pax.url.mvn.defaultRepositories=
+
+# Use the default local repo (e.g.~/.m2/repository) as a "remote" repo
+org.ops4j.pax.url.mvn.defaultLocalRepoAsRemote=false
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# The default list includes the following repositories containing releases:
+# http://repo1.maven.org/maven2
+# http://repository.apache.org/content/groups/snapshots-group
+# http://svn.apache.org/repos/asf/servicemix/m2-repo
+# http://repository.springsource.com/maven/bundles/release
+# http://repository.springsource.com/maven/bundles/external
+# To add repositories to the default ones, prepend '+' to the list of repositories
+# to add.
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snaphots
+# @noreleases : the repository does not contain any released artifacts
+# @id=reponid : the id for the repository, just like in the settings.xml this is optional but recomendet
+#
+# The default list doesn't contain any repository containing snapshots as it can impact the artifacts resolution.
+# You may want to add the following repositories containing snapshots:
+# http://repository.apache.org/content/groups/snapshots-group@id=apache@snapshots@noreleases
+# http://oss.sonatype.org/content/repositories/snapshots@id=sonatype.snapshots.deploy@snapshots@norelease
+# http://oss.sonatype.org/content/repositories/ops4j-snapshots@id=ops4j.sonatype.snapshots.deploy@snapshots@noreleases
+#
+org.ops4j.pax.url.mvn.repositories= \
+ file:${karaf.home}/${karaf.default.repository}@id=system.repository, \
+ file:${karaf.data}/kar@id=kar.repository@multi, \
+ http://repo1.maven.org/maven2@id=central, \
+ http://repository.springsource.com/maven/bundles/release@id=spring.ebr.release, \
+ http://repository.springsource.com/maven/bundles/external@id=spring.ebr.external
<module>netconf-connector-config</module>
<module>netconf-auth</module>
<module>netconf-usermanager</module>
+ <module>netconf-testtool</module>
</modules>
<dependencies>
<module>netconf-it</module>
</modules>
</profile>
-
- <profile>
- <id>testtool</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>netconf-testtool</module>
- </modules>
- </profile>
</profiles>
</project>
c.add(createContainerServiceDependency(containerName).setService(
IInventoryService.class).setCallbacks(
"setInventoryService", "unsetInventoryService")
- .setRequired(false));
+ .setRequired(true));
c.add(createContainerServiceDependency(containerName).setService(
IStatisticsManager.class).setCallbacks(
"setStatisticsManager", "unsetStatisticsManager")