@Benchmark
@Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
- public void singleNodes100KWriteBenchmark() throws Exception {
- applyWriteSingleNode(OUTER_LIST_100K);
- }
-
- private void applyWriteSingleNode(final int reps) throws DataValidationFailedException {
+ public void write100KSingleNodeWithOneInnerItemInOneCommitBenchmark() throws Exception {
final DataTreeSnapshot snapshot = datastore.takeSnapshot();
final DataTreeModification modification = snapshot.newModification();
- for (int outerListKey = 0; outerListKey < reps; ++outerListKey) {
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_100K; ++outerListKey) {
modification.write(OUTER_LIST_100K_PATHS[outerListKey], OUTER_LIST_ONE_ITEM_INNER_LIST[outerListKey]);
}
datastore.validate(modification);
@Benchmark
@Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
- public void twoNodes50KWriteBenchmark() throws Exception {
- applyWriteTwoNodes(OUTER_LIST_50K);
+ public void write100KSingleNodeWithOneInnerItemInCommitPerWriteBenchmark() throws Exception {
+ final DataTreeSnapshot snapshot = datastore.takeSnapshot();
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_100K; ++outerListKey) {
+ final DataTreeModification modification = snapshot.newModification();
+ modification.write(OUTER_LIST_100K_PATHS[outerListKey], OUTER_LIST_ONE_ITEM_INNER_LIST[outerListKey]);
+ datastore.validate(modification);
+ final DataTreeCandidate candidate = datastore.prepare(modification);
+ datastore.commit(candidate);
+ }
}
- private void applyWriteTwoNodes(final int reps) throws DataValidationFailedException {
+ @Benchmark
+ @Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
+ @Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
+ public void write50KSingleNodeWithTwoInnerItemsInOneCommitBenchmark() throws Exception {
final DataTreeSnapshot snapshot = datastore.takeSnapshot();
final DataTreeModification modification = snapshot.newModification();
- for (int outerListKey = 0; outerListKey < reps; ++outerListKey) {
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_50K; ++outerListKey) {
modification.write(OUTER_LIST_50K_PATHS[outerListKey], OUTER_LIST_TWO_ITEM_INNER_LIST[outerListKey]);
}
datastore.validate(modification);
@Benchmark
@Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
- public void tenNodes10KWriteBenchmark() throws Exception {
- applyWriteTenNodes(OUTER_LIST_10K);
+ public void write50KSingleNodeWithTwoInnerItemsInCommitPerWriteBenchmark() throws Exception {
+ final DataTreeSnapshot snapshot = datastore.takeSnapshot();
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_50K; ++outerListKey) {
+ final DataTreeModification modification = snapshot.newModification();
+ modification.write(OUTER_LIST_50K_PATHS[outerListKey], OUTER_LIST_TWO_ITEM_INNER_LIST[outerListKey]);
+ datastore.validate(modification);
+ final DataTreeCandidate candidate = datastore.prepare(modification);
+ datastore.commit(candidate);
+ }
}
- private void applyWriteTenNodes(final int reps) throws DataValidationFailedException {
+ @Benchmark
+ @Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
+ @Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
+ public void write10KSingleNodeWithTenInnerItemsInOneCommitBenchmark() throws Exception {
final DataTreeSnapshot snapshot = datastore.takeSnapshot();
final DataTreeModification modification = snapshot.newModification();
- for (int outerListKey = 0; outerListKey < reps; ++outerListKey) {
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_10K; ++outerListKey) {
modification.write(OUTER_LIST_10K_PATHS[outerListKey], OUTER_LIST_TEN_ITEM_INNER_LIST[outerListKey]);
}
datastore.validate(modification);
final DataTreeCandidate candidate = datastore.prepare(modification);
datastore.commit(candidate);
}
+
+ @Benchmark
+ @Warmup(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
+ @Measurement(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
+ public void write10KSingleNodeWithTenInnerItemsInCommitPerWriteBenchmark() throws Exception {
+ final DataTreeSnapshot snapshot = datastore.takeSnapshot();
+ for (int outerListKey = 0; outerListKey < OUTER_LIST_10K; ++outerListKey) {
+ final DataTreeModification modification = snapshot.newModification();
+ modification.write(OUTER_LIST_10K_PATHS[outerListKey], OUTER_LIST_TEN_ITEM_INNER_LIST[outerListKey]);
+ datastore.validate(modification);
+ final DataTreeCandidate candidate = datastore.prepare(modification);
+ datastore.commit(candidate);
+ }
+ }
}
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSortedMap;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
private static class IdentifiableItemCodec implements Codec<NodeIdentifierWithPredicates, IdentifiableItem<?, ?>> {
- private final ImmutableSortedMap<QName, ValueContext> keyValueContexts;
+ private final Map<QName, ValueContext> keyValueContexts;
private final ListSchemaNode schema;
private final Constructor<? extends Identifier<?>> constructor;
private final Class<?> identifiable;
final Class<?> identifiable, final Map<QName, ValueContext> keyValueContexts) {
this.schema = schema;
this.identifiable = identifiable;
- this.keyValueContexts = ImmutableSortedMap.copyOf(keyValueContexts);
this.constructor = getConstructor(keyClass);
+
+ /*
+ * We need to re-index to make sure we instantiate nodes in the order in which
+ * they are defined.
+ */
+ final Map<QName, ValueContext> keys = new LinkedHashMap<>();
+ for (QName qname : schema.getKeyDefinition()) {
+ keys.put(qname, keyValueContexts.get(qname));
+ }
+ this.keyValueContexts = ImmutableMap.copyOf(keys);
}
@Override
public IdentifiableItem<?, ?> deserialize(final NodeIdentifierWithPredicates input) {
- ArrayList<Object> bindingValues = new ArrayList<>();
-
- for(QName key: schema.getKeyDefinition()) {
+ final Collection<QName> keys = schema.getKeyDefinition();
+ final ArrayList<Object> bindingValues = new ArrayList<>(keys.size());
+ for (QName key : keys) {
Object yangValue = input.getKeyValues().get(key);
bindingValues.add(keyValueContexts.get(key).deserialize(yangValue));
}
+
+ final Identifier<?> identifier;
try {
- final Identifier<?> identifier = constructor.newInstance(bindingValues.toArray());
- @SuppressWarnings({ "rawtypes", "unchecked" })
- final IdentifiableItem identifiableItem = new IdentifiableItem(identifiable, identifier);
- return identifiableItem;
+ identifier = constructor.newInstance(bindingValues.toArray());
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
- throw new IllegalStateException(e);
+ throw new IllegalStateException(String.format("Failed to instantiate key class %s", constructor.getDeclaringClass()), e);
}
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ final IdentifiableItem identifiableItem = new IdentifiableItem(identifiable, identifier);
+ return identifiableItem;
}
@Override
public NodeIdentifierWithPredicates serialize(final IdentifiableItem<?, ?> input) {
Object value = input.getKey();
- Map<QName, Object> values = new HashMap<>();
+ Map<QName, Object> values = new LinkedHashMap<>();
for (Entry<QName, ValueContext> valueCtx : keyValueContexts.entrySet()) {
values.put(valueCtx.getKey(), valueCtx.getValue().getAndSerialize(value));
}
return new NodeIdentifierWithPredicates(schema.getQName(), values);
}
-
}
@SuppressWarnings("unchecked")
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.HashMap;
import java.util.HashSet;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
}
private PathArgument serializeIdentifiableItem(final IdentifiableItem<?,?> argument, final QName previousQname) {
- Map<QName, Object> predicates = new HashMap<>();
@SuppressWarnings("rawtypes")
Class type = argument.getType();
@SuppressWarnings("unchecked")
ValueWithQName combinedInput = new ValueWithQName(previousQname, argument.getKey());
@SuppressWarnings("unchecked")
CompositeNode compositeOutput = keyCodec.serialize(combinedInput);
+
+ final Map<QName, Object> predicates = new LinkedHashMap<>();
for (Node<?> outputValue : compositeOutput.getValue()) {
predicates.put(outputValue.getNodeType(), outputValue.getValue());
}
</plugin>
</plugins>
</reporting>
+
+
+ <!-- Note: we can not use variables for these URLs because we need to
+ be able to download the parent pom from the repository the first
+ time we go to use it (since it is in a different project).
+ To override the settings, use the "mirror" section of the
+ settings.xml. See http://maven.apache.org/settings.html -->
<repositories>
<!-- OpenDayLight Repo Mirror -->
<repository>
<id>opendaylight-mirror</id>
<name>opendaylight-mirror</name>
- <url>${nexusproxy}/groups/public/</url>
+ <url>http://nexus.opendaylight.org/content/groups/public/</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
<repository>
<id>opendaylight-snapshot</id>
<name>opendaylight-snapshot</name>
- <url>${nexusproxy}/repositories/opendaylight.snapshot/</url>
+ <url>http://nexus.opendaylight.org/content/repositories/opendaylight.snapshot/</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
if( LOG.isTraceEnabled() ) {
LOG.trace( "{}: submitNotifications for listener {}: {}",
- name, listener.getClass(), notifications );
+ name, listener.toString(), notifications );
}
ListenerKey<L> key = new ListenerKey<>( listener );
// to the caller.
LOG.debug( "{}: Submitting NotificationTask for listener {}",
- name, listener.getClass() );
+ name, listener.toString() );
executor.execute( newNotificationTask );
break;
// telling us to quit.
LOG.debug( "{}: Interrupted trying to add to {} listener's queue",
- name, listener.getClass() );
+ name, listener.toString() );
}
if( LOG.isTraceEnabled() ) {
LOG.trace( "{}: submitNotifications dine for listener {}",
- name, listener.getClass() );
+ name, listener.toString() );
}
}
List<ListenerNotificationQueueStats> statsList = new ArrayList<>( listenerCache.size() );
for( NotificationTask task: listenerCache.values() ) {
statsList.add( new ListenerNotificationQueueStats(
- task.listenerKey.getListener().getClass().getName(),
- task.notificationQueue.size() ) );
+ task.listenerKey.toString(), task.notificationQueue.size() ) );
}
return statsList ;
ListenerKey<?> other = (ListenerKey<?>) obj;
return listener == other.listener;
}
+
+ @Override
+ public String toString() {
+ return listener.toString();
+ }
}
/**
if( LOG.isDebugEnabled() ) {
LOG.debug( "{}: Offering notification to the queue for listener {}: {}",
- name, listenerKey.getListener().getClass(), notification );
+ name, listenerKey.toString(), notification );
}
if( notificationQueue.offer( notification, 1, TimeUnit.MINUTES ) ) {
LOG.warn(
"{}: Timed out trying to offer a notification to the queue for listener {}." +
"The queue has reached its capacity of {}",
- name, listenerKey.getListener().getClass(), maxQueueCapacity );
+ name, listenerKey.toString(), maxQueueCapacity );
}
}
// The executor is probably shutting down so log as debug.
LOG.debug( "{}: Interrupted trying to remove from {} listener's queue",
- name, listenerKey.getListener().getClass() );
+ name, listenerKey.toString() );
} finally {
// We're exiting, gracefully or not - either way make sure we always remove
if( LOG.isDebugEnabled() ) {
LOG.debug( "{}: Invoking listener {} with notification: {}",
- name, listenerKey.getListener().getClass(), notification );
+ name, listenerKey.toString(), notification );
}
listenerInvoker.invokeListener( listenerKey.getListener(), notification );
// remaining notifications.
LOG.error( String.format( "%1$s: Error notifying listener %2$s", name,
- listenerKey.getListener().getClass() ), e );
+ listenerKey.toString() ), e );
} catch( Error e ) {
private static abstract class AbstractPathArgument implements PathArgument {
private static final long serialVersionUID = -4546547994250849340L;
private final QName nodeType;
+ private volatile transient Integer hash = null;
protected AbstractPathArgument(final QName nodeType) {
this.nodeType = Preconditions.checkNotNull(nodeType);
return nodeType.compareTo(o.getNodeType());
}
- @Override
- public int hashCode() {
+ protected int hashCodeImpl() {
return 31 + getNodeType().hashCode();
}
+ @Override
+ public final int hashCode() {
+ Integer ret = hash;
+ if (ret == null) {
+ synchronized (this) {
+ ret = hash;
+ if (ret == null) {
+ ret = hashCodeImpl();
+ hash = ret;
+ }
+ }
+ }
+
+ return ret;
+ }
+
@Override
public boolean equals(final Object obj) {
if (this == obj) {
}
@Override
- public int hashCode() {
+ protected int hashCodeImpl() {
final int prime = 31;
- int result = super.hashCode();
+ int result = super.hashCodeImpl();
result = prime * result;
for (Entry<QName, Object> entry : keyValues.entrySet()) {
}
@Override
- public int hashCode() {
+ protected int hashCodeImpl() {
final int prime = 31;
- int result = super.hashCode();
+ int result = super.hashCodeImpl();
result = prime * result + ((value == null) ? 0 : YangInstanceIdentifier.hashCode(value));
return result;
}
import static org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter.UNKNOWN_SIZE;
import com.google.common.annotations.Beta;
+import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
+import java.util.Collection;
+import java.util.Set;
import javax.xml.stream.XMLStreamReader;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.AnyXmlNode;
import org.opendaylight.yangtools.yang.data.api.schema.AugmentationNode;
import org.opendaylight.yangtools.yang.data.api.schema.ChoiceNode;
import org.opendaylight.yangtools.yang.data.api.schema.OrderedMapNode;
import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListEntryNode;
import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* This is an experimental iterator over a {@link NormalizedNode}. This is essentially
* us to write multiple nodes.
*/
@Beta
-public final class NormalizedNodeWriter implements Closeable, Flushable {
+public class NormalizedNodeWriter implements Closeable, Flushable {
private final NormalizedNodeStreamWriter writer;
private NormalizedNodeWriter(final NormalizedNodeStreamWriter writer) {
this.writer = Preconditions.checkNotNull(writer);
}
+ protected final NormalizedNodeStreamWriter getWriter() {
+ return writer;
+ }
+
/**
* Create a new writer backed by a {@link NormalizedNodeStreamWriter}.
*
- * @param writer Backend writer
+ * @param writer Back-end writer
* @return A new instance.
*/
public static NormalizedNodeWriter forStreamWriter(final NormalizedNodeStreamWriter writer) {
- return new NormalizedNodeWriter(writer);
+ return forStreamWriter(writer, true);
+ }
+
+ /**
+ * Create a new writer backed by a {@link NormalizedNodeStreamWriter}. Unlike the simple {@link #forStreamWriter(NormalizedNodeStreamWriter)}
+ * method, this allows the caller to switch off RFC6020 XML compliance, providing better
+ * throughput. The reason is that the XML mapping rules in RFC6020 require the encoding
+ * to emit leaf nodes which participate in a list's key first and in the order in which
+ * they are defined in the key. For JSON, this requirement is completely relaxed and leaves
+ * can be ordered in any way we see fit. The former requires a bit of work: first a lookup
+ * for each key and then for each emitted node we need to check whether it was already
+ * emitted.
+ *
+ * @param writer Back-end writer
+ * @param orderKeyLeaves whether the returned instance should be RFC6020 XML compliant.
+ * @return A new instance.
+ */
+ public static NormalizedNodeWriter forStreamWriter(final NormalizedNodeStreamWriter writer, final boolean orderKeyLeaves) {
+ if (orderKeyLeaves) {
+ return new OrderedNormalizedNodeWriter(writer);
+ } else {
+ return new NormalizedNodeWriter(writer);
+ }
}
/**
* @return
* @throws IOException when thrown from the backing writer.
*/
- public NormalizedNodeWriter write(final NormalizedNode<?, ?> node) throws IOException {
+ public final NormalizedNodeWriter write(final NormalizedNode<?, ?> node) throws IOException {
if (wasProcessedAsCompositeNode(node)) {
return this;
}
throw new IllegalStateException("It wasn't possible to serialize node " + node);
}
+ @Override
+ public void flush() throws IOException {
+ writer.flush();
+ }
+
+ @Override
+ public void close() throws IOException {
+ writer.flush();
+ writer.close();
+ }
+
+ /**
+ * Emit a best guess of a hint for a particular set of children. It evaluates the
+ * iterable to see if the size can be easily gotten to. If it is, we hint at the
+ * real number of child nodes. Otherwise we emit UNKNOWN_SIZE.
+ *
+ * @param children Child nodes
+ * @return Best estimate of the collection size required to hold all the children.
+ */
+ static final int childSizeHint(final Iterable<?> children) {
+ return (children instanceof Collection) ? ((Collection<?>) children).size() : UNKNOWN_SIZE;
+ }
+
private boolean wasProcessAsSimpleNode(final NormalizedNode<?, ?> node) throws IOException {
if (node instanceof LeafSetEntryNode) {
final LeafSetEntryNode<?> nodeAsLeafList = (LeafSetEntryNode<?>)node;
return false;
}
+ /**
+ * Emit events for all children and then emit an endNode() event.
+ *
+ * @param children Child iterable
+ * @return True
+ * @throws IOException when the writer reports it
+ */
+ protected final boolean writeChildren(final Iterable<? extends NormalizedNode<?, ?>> children) throws IOException {
+ for (NormalizedNode<?, ?> child : children) {
+ write(child);
+ }
+
+ writer.endNode();
+ return true;
+ }
+
+ protected boolean writeMapEntryNode(final MapEntryNode node) throws IOException {
+ writer.startMapEntryNode(node.getIdentifier(), childSizeHint(node.getValue()));
+ return writeChildren(node.getValue());
+ }
+
private boolean wasProcessedAsCompositeNode(final NormalizedNode<?, ?> node) throws IOException {
- boolean hasDataContainerChild = false;
if (node instanceof ContainerNode) {
- writer.startContainerNode(((ContainerNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof MapEntryNode) {
- writer.startMapEntryNode(((MapEntryNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof UnkeyedListEntryNode) {
- writer.startUnkeyedListItem(((UnkeyedListEntryNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof ChoiceNode) {
- writer.startChoiceNode(((ChoiceNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof AugmentationNode) {
- writer.startAugmentationNode(((AugmentationNode) node).getIdentifier());
- hasDataContainerChild = true;
- } else if (node instanceof UnkeyedListNode) {
- writer.startUnkeyedList(((UnkeyedListNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof OrderedMapNode) {
- writer.startOrderedMapNode(((OrderedMapNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- } else if (node instanceof MapNode) {
- writer.startMapNode(((MapNode) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- //covers also OrderedLeafSetNode for which doesn't exist start* method
- } else if (node instanceof LeafSetNode) {
- writer.startLeafSet(((LeafSetNode<?>) node).getIdentifier(), UNKNOWN_SIZE);
- hasDataContainerChild = true;
- }
-
- if (hasDataContainerChild) {
- for (NormalizedNode<?, ?> childNode : ((NormalizedNode<?, Iterable<NormalizedNode<?, ?>>>) node).getValue()) {
- write(childNode);
- }
-
- writer.endNode();
- return true;
+ final ContainerNode n = (ContainerNode) node;
+ writer.startContainerNode(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof MapEntryNode) {
+ return writeMapEntryNode((MapEntryNode) node);
+ }
+ if (node instanceof UnkeyedListEntryNode) {
+ final UnkeyedListEntryNode n = (UnkeyedListEntryNode) node;
+ writer.startUnkeyedListItem(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof ChoiceNode) {
+ final ChoiceNode n = (ChoiceNode) node;
+ writer.startChoiceNode(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof AugmentationNode) {
+ final AugmentationNode n = (AugmentationNode) node;
+ writer.startAugmentationNode(n.getIdentifier());
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof UnkeyedListNode) {
+ final UnkeyedListNode n = (UnkeyedListNode) node;
+ writer.startUnkeyedList(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof OrderedMapNode) {
+ final OrderedMapNode n = (OrderedMapNode) node;
+ writer.startOrderedMapNode(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof MapNode) {
+ final MapNode n = (MapNode) node;
+ writer.startMapNode(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
+ }
+ if (node instanceof LeafSetNode) {
+ //covers also OrderedLeafSetNode for which doesn't exist start* method
+ final LeafSetNode<?> n = (LeafSetNode<?>) node;
+ writer.startLeafSet(n.getIdentifier(), childSizeHint(n.getValue()));
+ return writeChildren(n.getValue());
}
- return false;
+ return false;
}
- @Override
- public void flush() throws IOException {
- writer.flush();
- }
+ private static final class OrderedNormalizedNodeWriter extends NormalizedNodeWriter {
+ private static final Logger LOG = LoggerFactory.getLogger(OrderedNormalizedNodeWriter.class);
- @Override
- public void close() throws IOException {
- writer.close();
+ OrderedNormalizedNodeWriter(final NormalizedNodeStreamWriter writer) {
+ super(writer);
+ }
+
+ @Override
+ protected boolean writeMapEntryNode(final MapEntryNode node) throws IOException {
+ getWriter().startMapEntryNode(node.getIdentifier(), childSizeHint(node.getValue()));
+
+ final Set<QName> qnames = node.getIdentifier().getKeyValues().keySet();
+ // Write out all the key children
+ for (QName qname : qnames) {
+ final Optional<? extends NormalizedNode<?, ?>> child = node.getChild(new NodeIdentifier(qname));
+ if (child.isPresent()) {
+ write(child.get());
+ } else {
+ LOG.info("No child for key element {} found", qname);
+ }
+ }
+
+ // Write all the rest
+ return writeChildren(Iterables.filter(node.getValue(), new Predicate<NormalizedNode<?, ?>>() {
+ @Override
+ public boolean apply(final NormalizedNode<?, ?> input) {
+ if (input instanceof AugmentationNode) {
+ return true;
+ }
+ if (!qnames.contains(input.getNodeType())) {
+ return true;
+ }
+
+ LOG.debug("Skipping key child {}", input);
+ return false;
+ }
+ }));
+ }
}
}
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableSet;
import com.google.gson.stream.JsonWriter;
+
import java.io.IOException;
import java.io.Writer;
+import java.math.BigDecimal;
+import java.math.BigInteger;
import java.net.URI;
import java.util.ArrayDeque;
+import java.util.Collection;
import java.util.Deque;
+
import org.opendaylight.yangtools.concepts.Codec;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
}
}
+ private static final Collection<Class<?>> NUMERIC_CLASSES =
+ ImmutableSet.<Class<?>>of(Byte.class, Short.class, Integer.class, Long.class, BigInteger.class, BigDecimal.class);
private final Deque<TypeInfo> stack = new ArrayDeque<>();
private final SchemaContext schemaContext;
private final CodecFactory codecs;
private final Writer writer;
private final String indent;
- private URI currentNamespace = null;
private int currentDepth = 0;
+ private URI currentNamespace;
private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext,
final Writer writer, final int indentSize) {
- this.schemaContext = Preconditions.checkNotNull(schemaContext);
- this.writer = Preconditions.checkNotNull(writer);
-
- Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative");
- if (indentSize != 0) {
- indent = Strings.repeat(" ", indentSize);
- } else {
- indent = null;
- }
-
- this.codecs = CodecFactory.create(schemaContext);
- this.tracker = SchemaTracker.create(schemaContext);
+ this(schemaContext, SchemaPath.ROOT, writer, null, indentSize);
}
private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext, final SchemaPath path,
- final Writer writer, final URI initialNs,final int indentSize) {
+ final Writer writer, final URI initialNs, final int indentSize) {
this.schemaContext = Preconditions.checkNotNull(schemaContext);
this.writer = Preconditions.checkNotNull(writer);
} else {
indent = null;
}
- this.currentNamespace = initialNs;
this.codecs = CodecFactory.create(schemaContext);
- this.tracker = SchemaTracker.create(schemaContext,path);
+ this.tracker = SchemaTracker.create(schemaContext, path);
+
+ this.currentNamespace = initialNs;
}
/**
* @param writer Output writer
* @return A stream writer instance
*/
- public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, SchemaPath path,final Writer writer) {
+ public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,final Writer writer) {
return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, null, 0);
}
* @param initialNs Initial namespace
* @return A stream writer instance
*/
- public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, SchemaPath path,URI initialNs, final Writer writer) {
+ public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,final URI initialNs, final Writer writer) {
return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, initialNs, 0);
}
separateElementFromPreviousElement();
writeJsonIdentifier(name);
currentNamespace = stack.peek().getNamespace();
- writeValue(String.valueOf(codec.serialize(value)));
+ writeValue(codec.serialize(value));
separateNextSiblingsWithComma();
}
final Codec<Object, Object> codec = codecs.codecFor(schema.getType());
separateElementFromPreviousElement();
- writeValue(String.valueOf(codec.serialize(value)));
+ writeValue(codec.serialize(value));
separateNextSiblingsWithComma();
}
public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startListItem(name);
- stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
separateElementFromPreviousElement();
+ stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
writeStartObject();
indentRight();
}
separateElementFromPreviousElement();
writeJsonIdentifier(name);
currentNamespace = stack.peek().getNamespace();
- writeValue(value.toString());
+ writeValue(value);
separateNextSiblingsWithComma();
}
}
}
- private void writeValue(final String value) throws IOException {
- writer.append('"');
- writer.append(value);
- writer.append('"');
+ private void writeValue(final Object value) throws IOException {
+ final String str = String.valueOf(value);
+
+ if (!NUMERIC_CLASSES.contains(value.getClass())) {
+ writer.append('"');
+ writer.append(str);
+ writer.append('"');
+ } else {
+ writer.append(str);
+ }
}
private void writeJsonIdentifier(final NodeIdentifier name) throws IOException {
*/
package org.opendaylight.yangtools.yang.data.codec.gson;
-import com.google.common.base.Function;
import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
import java.io.IOException;
+import java.util.Collection;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import javax.annotation.Nonnull;
-
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
class ListEntryNodeDataWithSchema extends CompositeNodeDataWithSchema {
- private static final Function<SimpleNodeDataWithSchema, Object> VALUE_FUNCTION = new Function<SimpleNodeDataWithSchema, Object>() {
- @Override
- public Object apply(@Nonnull final SimpleNodeDataWithSchema input) {
- return input.getValue();
- }
- };
private final Map<QName, SimpleNodeDataWithSchema> qNameToKeys = new HashMap<>();
@Override
public void write(final NormalizedNodeStreamWriter writer) throws IOException {
- final int keyCount = ((ListSchemaNode) getSchema()).getKeyDefinition().size();
- if (keyCount == 0) {
+ final Collection<QName> keyDef = ((ListSchemaNode) getSchema()).getKeyDefinition();
+ if (keyDef.isEmpty()) {
writer.startUnkeyedListItem(provideNodeIdentifier(), childSizeHint());
super.write(writer);
writer.endNode();
return;
}
- Preconditions.checkState(keyCount == qNameToKeys.size(), "Input is missing some of the keys of %s", getSchema().getQName());
+ Preconditions.checkState(keyDef.size() == qNameToKeys.size(), "Input is missing some of the keys of %s", getSchema().getQName());
+
+ // Need to restore schema order...
+ final Map<QName, Object> predicates = new LinkedHashMap<>();
+ for (QName qname : keyDef) {
+ predicates.put(qname, qNameToKeys.get(qname).getValue());
+ }
+
writer.startMapEntryNode(
- new NodeIdentifierWithPredicates(getSchema().getQName(), Maps.transformValues(qNameToKeys, VALUE_FUNCTION)),
+ new NodeIdentifierWithPredicates(getSchema().getQName(), predicates),
childSizeHint());
super.write(writer);
writer.endNode();
private final DataNodeContainer root;
private SchemaTracker(final SchemaContext context, final SchemaPath path) {
- DataNodeContainer current = Preconditions.checkNotNull(context);
+ DataSchemaNode current = Preconditions.checkNotNull(context);
for (QName qname : path.getPathFromRoot()) {
- final DataSchemaNode child = current.getDataChildByName(qname);
- Preconditions.checkArgument(child instanceof DataNodeContainer);
- current = (DataNodeContainer) child;
+ final DataSchemaNode child;
+ if(current instanceof DataNodeContainer) {
+ child = ((DataNodeContainer) current).getDataChildByName(qname);
+ } else if (current instanceof ChoiceNode) {
+ child = ((ChoiceNode) current).getCaseNodeByName(qname);
+ } else {
+ throw new IllegalArgumentException(String.format("Schema node %s does not allow children.",current));
+ }
+ current = child;
}
-
- this.root = current;
+ Preconditions.checkArgument(current instanceof DataNodeContainer,"Schema path must point to container or list. Supplied path %s pointed to: %s",path,current);
+ this.root = (DataNodeContainer) current;
}
/**
package org.opendaylight.yangtools.yang.data.impl.codec.xml;
import com.google.common.base.Preconditions;
-
import java.net.URI;
-
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.util.AbstractStringIdentityrefCodec;
import org.opendaylight.yangtools.yang.model.api.Module;
@Override
protected QName createQName(final String prefix, final String localName) {
- final String namespace = element.lookupNamespaceURI(prefix);
+ final String namespace = element.lookupNamespaceURI(!prefix.isEmpty() ? prefix : null);
Preconditions.checkArgument(namespace != null, "Failed to lookup prefix %s", prefix);
final URI ns = URI.create(namespace);
if (rpcDefinition.isPresent()) {
RpcDefinition rpc = rpcDefinition.get();
- final Collection<DataSchemaNode> outputNode = rpc.getOutput().getChildNodes();
+ final Collection<DataSchemaNode> outputNode = rpc.getOutput() != null ? rpc.getOutput().getChildNodes() : null;
final Element rpcReplyElement = document.getDocumentElement();
final QName partialQName = qNameFromElement(rpcReplyElement);
if (RPC_REPLY_QNAME.equals(partialQName)) {
final List<Node<?>> domNodes = toDomNodes(rpcReplyElement, Optional.fromNullable(outputNode), context);
+ QName qName = rpc.getOutput() != null ? rpc.getOutput().getQName() : QName.cachedReference(QName.create(rpcName, "output"));
List<Node<?>> rpcOutNodes = Collections.<Node<?>>singletonList(ImmutableCompositeNode.create(
- rpc.getOutput().getQName(), domNodes));
+ qName, domNodes));
return ImmutableCompositeNode.create(rpcName, rpcOutNodes);
}
}
return leafNode(new NodeIdentifier(name), value);
}
- public static DataContainerNodeBuilder<NodeIdentifierWithPredicates, MapEntryNode> mapEntryBuilder(final QName nodeName,final QName keyName,final Object keyValue) {
+ public static DataContainerNodeBuilder<NodeIdentifierWithPredicates, MapEntryNode> mapEntryBuilder(final QName nodeName, final QName keyName, final Object keyValue) {
return ImmutableMapEntryNodeBuilder.create()
- .withNodeIdentifier(new NodeIdentifierWithPredicates(nodeName, keyName,keyValue))
+ .withNodeIdentifier(new NodeIdentifierWithPredicates(nodeName, keyName, keyValue)) // identifier carries the key predicate; the matching key leaf is added below
.withChild(leafNode(keyName, keyValue));
}
*/
package org.opendaylight.yangtools.yang.data.impl.schema.builder.impl;
+import com.google.common.base.Preconditions;
import java.util.Collection;
+import java.util.LinkedHashMap;
import java.util.Map;
-
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.valid.DataValidationException;
import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-
public final class ImmutableMapEntryNodeSchemaAwareBuilder extends ImmutableMapEntryNodeBuilder{
private final ListSchemaNode schema;
private YangInstanceIdentifier.NodeIdentifierWithPredicates constructNodeIdentifier() {
Collection<QName> keys = schema.getKeyDefinition();
- if(keys.isEmpty()) {
+ if (keys.isEmpty()) {
keys = childrenQNamesToPaths.keySet();
}
- final Map<QName, Object> keysToValues = Maps.newHashMap();
+ final Map<QName, Object> keysToValues = new LinkedHashMap<>();
for (QName key : keys) {
- final DataContainerChild<?, ?> valueForKey = getChild(childrenQNamesToPaths.get(key));
+ final DataContainerChild<?, ?> valueForKey = getChild(childrenQNamesToPaths.get(key));
DataValidationException.checkListKey(valueForKey, key, new YangInstanceIdentifier.NodeIdentifierWithPredicates(
- schema.getQName(), keysToValues));
+ schema.getQName(), keysToValues));
keysToValues.put(key, valueForKey.getValue());
}
package org.opendaylight.yangtools.yang.data.impl.codec.xml;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.io.ByteSource;
-
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
-
import javax.activation.UnsupportedDataTypeException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-
import org.custommonkey.xmlunit.XMLUnit;
import org.junit.Before;
import org.junit.Test;
+import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
+import org.opendaylight.yangtools.yang.data.api.Node;
import org.opendaylight.yangtools.yang.data.api.SimpleNode;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
"<ref xmlns:ltha=\"urn:opendaylight:controller:rpc:test\">/ltha:cont/ltha:l[ltha:id='id']</ref>\n" +
"</input>";
+ public static final String RPC_REPLY = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" message-id=\"m-1\">\n" +
+ " <ok/>\n" +
+ "</rpc-reply>";
+
private SchemaContext schema;
private RpcDefinition testRpc;
XMLUnit.compareXML(inputDocument, serializedDocument);
}
+ @Test
+ public void testRpcReplyToDom() throws Exception {
+ final Document reply = readXmlToDocument(RPC_REPLY); // RPC_REPLY is an <rpc-reply> carrying a bare <ok/>, i.e. an output not declared in the rpc's schema
+ final CompositeNode domNodes = XmlDocumentUtils.rpcReplyToDomNodes(reply, QName.create("urn:opendaylight:controller:rpc:test", "2014-07-28", "test"), schema);
+ assertEquals(1, domNodes.getValue().size());
+ final Node<?> outputNode = domNodes.getValue().get(0); // synthesized "output" wrapper — presumably exercises the null-output fallback path; confirm against rpcReplyToDomNodes
+ assertTrue(outputNode instanceof CompositeNode);
+ assertEquals(1, ((CompositeNode) outputNode).getValue().size());
+ final Node<?> okNode = ((CompositeNode) outputNode).getValue().get(0);
+ assertEquals("ok", okNode.getNodeType().getLocalName());
+ }
+
public static Document readXmlToDocument(final String xmlContent) throws SAXException, IOException {
return readXmlToDocument(new ByteArrayInputStream(xmlContent.getBytes(Charsets.UTF_8)));
}
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
-
import java.net.URI;
import java.util.Iterator;
-
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
-
import org.opendaylight.yangtools.yang.common.QName;
abstract class AbstractNamespaceCodec {
return null;
}
- final String prefix = it.next().trim();
- if (prefix.isEmpty()) {
- return null;
- }
- // it is not "prefix:value"
- if (!it.hasNext()) {
+ final String first = it.next().trim();
+ if (first.isEmpty()) {
return null;
}
- final String identifier = it.next().trim();
+ final String identifier;
+ final String prefix;
+ if (it.hasNext()) {
+ // It is "prefix:value"
+ prefix = first;
+ identifier = it.next().trim();
+ } else {
+ prefix = "";
+ identifier = first;
+ }
if (identifier.isEmpty()) {
return null;
}
import com.google.common.base.Splitter;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
// predicates
final Matcher matcher = PREDICATE_PATTERN.matcher(xPathArgument);
- final Map<QName, Object> predicates = new HashMap<>();
+ final Map<QName, Object> predicates = new LinkedHashMap<>();
QName currentQName = mainQName;
while (matcher.find()) {
instance.userOrdered = userOrdered;
// KEY
- if (keys == null) {
- instance.keyDefinition = ImmutableList.of();
- } else {
+ if (keys != null) {
keyDefinition = new ArrayList<>();
for (String key : keys) {
DataSchemaNode keyPart = instance.getDataChildByName(key);
throw new YangParseException(getModuleName(), getLine(), "Failed to resolve list key for name "
+ key);
}
- keyDefinition.add(keyPart.getQName());
+
+ final QName qname = keyPart.getQName();
+ if (!keyDefinition.contains(qname)) {
+ keyDefinition.add(qname);
+ }
}
instance.keyDefinition = ImmutableList.copyOf(keyDefinition);
+ } else {
+ instance.keyDefinition = ImmutableList.of();
}
// ORIGINAL NODE