X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJSONNormalizedNodeStreamWriter.java;h=e5dc228b334a3dd88802f2a74d56cbb5c916a652;hb=f7fe89b85f540dbe7d1fc051f2082f2ce571321a;hp=4ce4d432df0b49bf5d55564f11ff9d1aa5e649f9;hpb=6f4d5ac112a86840f05c35c24da3ff3de6ae5be3;p=yangtools.git

diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
index 4ce4d432df..e5dc228b33 100644
--- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
+++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
@@ -8,14 +8,10 @@
 package org.opendaylight.yangtools.yang.data.codec.gson;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.gson.stream.JsonWriter;
 import java.io.IOException;
-import java.io.Writer;
 import java.net.URI;
-import java.util.ArrayDeque;
-import java.util.Deque;
-import org.opendaylight.yangtools.concepts.Codec;
+import org.opendaylight.yangtools.yang.common.QName;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
@@ -24,8 +20,7 @@ import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker;
 import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
-import org.opendaylight.yangtools.yang.model.api.Module;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaPath;
 
 /**
@@ -33,347 +28,184 @@ import org.opendaylight.yangtools.yang.model.api.SchemaPath;
  *
  * Values of leaf and leaf-list are NOT translated according to codecs.
  *
- * FIXME: rewrite this in terms of {@link JsonWriter}.
  */
-public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter {
-
-    private static enum NodeType {
-        OBJECT,
-        LIST,
-        OTHER,
-    }
-
-    private static class TypeInfo {
-        private boolean hasAtLeastOneChild = false;
-        private final NodeType type;
-        private final URI uri;
-
-        public TypeInfo(final NodeType type, final URI uri) {
-            this.type = type;
-            this.uri = uri;
-        }
-
-        public void setHasAtLeastOneChild(final boolean hasChildren) {
-            this.hasAtLeastOneChild = hasChildren;
-        }
-
-        public NodeType getType() {
-            return type;
-        }
-
-        public URI getNamespace() {
-            return uri;
-        }
-
-        public boolean hasAtLeastOneChild() {
-            return hasAtLeastOneChild;
-        }
-    }
+public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter {
+    /**
+     * RFC6020 deviation: we are not required to emit empty containers unless they
+     * are marked as 'presence'.
+     */
+    private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true;
 
-    private final Deque stack = new ArrayDeque<>();
-    private final SchemaContext schemaContext;
-    private final CodecFactory codecs;
     private final SchemaTracker tracker;
-    private final Writer writer;
-    private final String indent;
-
-    private URI currentNamespace = null;
-    private int currentDepth = 0;
-
-    private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext,
-            final Writer writer, final int indentSize) {
-        this.schemaContext = Preconditions.checkNotNull(schemaContext);
-        this.writer = Preconditions.checkNotNull(writer);
-
-        Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative");
-        if (indentSize != 0) {
-            indent = Strings.repeat(" ", indentSize);
-        } else {
-            indent = null;
-        }
-
-        this.codecs = CodecFactory.create(schemaContext);
-        this.tracker = SchemaTracker.create(schemaContext);
-    }
+    private final JSONCodecFactory codecs;
+    private final JsonWriter writer;
+    private JSONStreamWriterContext context;
 
-    private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext, final SchemaPath path,
-            final Writer writer, final URI initialNs,final int indentSize) {
-        this.schemaContext = Preconditions.checkNotNull(schemaContext);
-        this.writer = Preconditions.checkNotNull(writer);
-
-        Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative");
-        if (indentSize != 0) {
-            indent = Strings.repeat(" ", indentSize);
-        } else {
-            indent = null;
-        }
-        this.currentNamespace = initialNs;
-        this.codecs = CodecFactory.create(schemaContext);
-        this.tracker = SchemaTracker.create(schemaContext,path);
+    private JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final JsonWriter JsonWriter, final JSONStreamWriterRootContext rootContext) {
+        this.writer = Preconditions.checkNotNull(JsonWriter);
+        this.codecs = Preconditions.checkNotNull(codecFactory);
+        this.tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path);
+        this.context = Preconditions.checkNotNull(rootContext);
     }
 
     /**
-     * Create a new stream writer, which writes to the specified {@link Writer}.
+     * Create a new stream writer, which writes to the specified output stream.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @return A stream writer instance
-     */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, writer, 0);
-    }
-
-    /**
-     * Create a new stream writer, which writes to the specified {@link Writer}.
+     * The codec factory can be reused between multiple writers.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @return A stream writer instance
-     */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, SchemaPath path,final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, null, 0);
-    }
-
-    /**
-     * Create a new stream writer, which writes to the specified {@link Writer}.
+     * Returned writer is exclusive user of JsonWriter, which means it will start
+     * top-level JSON element and ends it.
+     *
+     * This instance of writer can be used only to emit one top level element,
+     * otherwise it will produce incorrect JSON.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
+     * @param codecFactory JSON codec factory
+     * @param path Schema Path
      * @param initialNs Initial namespace
+     * @param jsonWriter JsonWriter
      * @return A stream writer instance
      */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, SchemaPath path,URI initialNs, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, initialNs, 0);
+    public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
+        return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterExclusiveRootContext(initialNs));
    }
 
     /**
      * Create a new stream writer, which writes to the specified output stream.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @param indentSize indentation size
+     * The codec factory can be reused between multiple writers.
+     *
+     * Returned writer can be used emit multiple top level element,
+     * but does not start / close parent JSON object, which must be done
+     * by user providing {@code jsonWriter} instance in order for
+     * JSON to be valid.
+     *
+     * @param codecFactory JSON codec factory
+     * @param path Schema Path
+     * @param initialNs Initial namespace
+     * @param jsonWriter JsonWriter
      * @return A stream writer instance
      */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer, final int indentSize) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, writer, indentSize);
+    public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
+        return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterSharedRootContext(initialNs));
     }
 
     @Override
     public void leafNode(final NodeIdentifier name, final Object value) throws IOException {
         final LeafSchemaNode schema = tracker.leafNode(name);
-        final Codec codec = codecs.codecFor(schema.getType());
-
-        separateElementFromPreviousElement();
-        writeJsonIdentifier(name);
-        currentNamespace = stack.peek().getNamespace();
-        writeValue(String.valueOf(codec.serialize(value)));
-        separateNextSiblingsWithComma();
+        final JSONCodec codec = codecs.codecFor(schema);
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
+        writeValue(value, codec);
     }
 
     @Override
     public void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
         tracker.startLeafSet(name);
-
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
-        writeJsonIdentifier(name);
-        writeStartList();
-        indentRight();
+        context = new JSONStreamWriterListContext(context, name);
     }
 
     @Override
-    public void leafSetEntryNode(final Object value) throws IOException {
+    public void leafSetEntryNode(final QName name, final Object value) throws IOException {
         final LeafListSchemaNode schema = tracker.leafSetEntryNode();
-        final Codec codec = codecs.codecFor(schema.getType());
+        final JSONCodec codec = codecs.codecFor(schema);
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        writeValue(value, codec);
+    }
 
-        separateElementFromPreviousElement();
-        writeValue(String.valueOf(codec.serialize(value)));
-        separateNextSiblingsWithComma();
+    @Override
+    public void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
+        tracker.startLeafSet(name);
+        context = new JSONStreamWriterListContext(context, name);
     }
 
+    /*
+     * Warning suppressed due to static final constant which triggers a warning
+     * for the call to schema.isPresenceContainer().
+     */
+    @SuppressWarnings("unused")
     @Override
     public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
-        tracker.startContainerNode(name);
+        final SchemaNode schema = tracker.startContainerNode(name);
 
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
-        writeJsonIdentifier(name);
-        writeStartObject();
-        indentRight();
+        // FIXME this code ignores presence for containers
+        // but datastore does as well and it needs be fixed first (2399)
+        context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
     }
 
     @Override
     public void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException {
         tracker.startList(name);
-
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
-        writeJsonIdentifier(name);
-        writeStartList();
-        indentRight();
+        context = new JSONStreamWriterListContext(context, name);
    }
 
     @Override
     public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException {
         tracker.startListItem(name);
-
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
-        writeStartObject();
-        indentRight();
+        context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
     }
 
     @Override
     public void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
         tracker.startList(name);
-
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
-        writeJsonIdentifier(name);
-        writeStartList();
-        indentRight();
+        context = new JSONStreamWriterListContext(context, name);
     }
 
     @Override
     public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) throws IOException {
         tracker.startListItem(identifier);
-        separateElementFromPreviousElement();
-        stack.push(new TypeInfo(NodeType.OBJECT, identifier.getNodeType().getNamespace()));
-
-
-        writeStartObject();
-        indentRight();
+        context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS);
     }
 
     @Override
     public void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
-        tracker.startListItem(name);
-
-        stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
-        separateElementFromPreviousElement();
-        writeJsonIdentifier(name);
-        writeStartList();
-        indentRight();
+        tracker.startList(name);
+        context = new JSONStreamWriterListContext(context, name);
     }
 
     @Override
-    public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) throws IllegalArgumentException {
+    public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) {
         tracker.startChoiceNode(name);
-        handleInvisibleNode(name.getNodeType().getNamespace());
+        context = new JSONStreamWriterInvisibleContext(context);
     }
 
     @Override
-    public void startAugmentationNode(final AugmentationIdentifier identifier) throws IllegalArgumentException {
+    public void startAugmentationNode(final AugmentationIdentifier identifier) {
         tracker.startAugmentationNode(identifier);
-        handleInvisibleNode(currentNamespace);
+        context = new JSONStreamWriterInvisibleContext(context);
     }
 
     @Override
     public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException {
+        @SuppressWarnings("unused")
         final AnyXmlSchemaNode schema = tracker.anyxmlNode(name);
         // FIXME: should have a codec based on this :)
 
-        separateElementFromPreviousElement();
-        writeJsonIdentifier(name);
-        currentNamespace = stack.peek().getNamespace();
-        writeValue(value.toString());
-        separateNextSiblingsWithComma();
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
+        // FIXME this kind of serialization is incorrect since the value for AnyXml is now a DOMSource
+        writer.value(String.valueOf(value));
     }
 
     @Override
-    public void endNode() throws IOException {
-        tracker.endNode();
-
-        final TypeInfo t = stack.pop();
-        switch (t.getType()) {
-        case LIST:
-            indentLeft();
-            newLine();
-            writer.append(']');
-            break;
-        case OBJECT:
-            indentLeft();
-            newLine();
-            writer.append('}');
-            break;
-        default:
-            break;
-        }
-
-        currentNamespace = stack.isEmpty() ? null : stack.peek().getNamespace();
-        separateNextSiblingsWithComma();
-    }
-
-    private void separateElementFromPreviousElement() throws IOException {
-        if (!stack.isEmpty() && stack.peek().hasAtLeastOneChild()) {
-            writer.append(',');
-        }
-        newLine();
+    public void startYangModeledAnyXmlNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
+        tracker.startYangModeledAnyXmlNode(name);
+        context = new JSONStreamWriterNamedObjectContext(context, name, true);
     }
 
-    private void newLine() throws IOException {
-        if (indent != null) {
-            writer.append('\n');
-
-            for (int i = 0; i < currentDepth; i++) {
-                writer.append(indent);
-            }
-        }
-    }
-
-    private void separateNextSiblingsWithComma() {
-        if (!stack.isEmpty()) {
-            stack.peek().setHasAtLeastOneChild(true);
-        }
-    }
-
-    /**
-     * Invisible nodes have to be also pushed to stack because of pairing of start*() and endNode() methods. Information
-     * about child existing (due to printing comma) has to be transfered to invisible node.
- */ - private void handleInvisibleNode(final URI uri) { - TypeInfo typeInfo = new TypeInfo(NodeType.OTHER, uri); - typeInfo.setHasAtLeastOneChild(stack.peek().hasAtLeastOneChild()); - stack.push(typeInfo); - } - - private void writeStartObject() throws IOException { - writer.append('{'); - } - - private void writeStartList() throws IOException { - writer.append('['); - } + @Override + public void endNode() throws IOException { + tracker.endNode(); + context = context.endNode(codecs.getSchemaContext(), writer); - private void writeModulName(final URI namespace) throws IOException { - if (this.currentNamespace == null || namespace != this.currentNamespace) { - Module module = schemaContext.findModuleByNamespaceAndRevision(namespace, null); - writer.append(module.getName()); - writer.append(':'); - currentNamespace = namespace; + if(context instanceof JSONStreamWriterRootContext) { + context.emitEnd(writer); } } - private void writeValue(final String value) throws IOException { - writer.append('"'); - writer.append(value); - writer.append('"'); - } - - private void writeJsonIdentifier(final NodeIdentifier name) throws IOException { - writer.append('"'); - writeModulName(name.getNodeType().getNamespace()); - writer.append(name.getNodeType().getLocalName()); - writer.append("\":"); - } - - private void indentRight() { - currentDepth++; - } - - private void indentLeft() { - currentDepth--; + private void writeValue(final Object value, final JSONCodec codec) + throws IOException { + codec.serializeToWriter(writer,value); } @Override @@ -383,8 +215,7 @@ public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWrite @Override public void close() throws IOException { - writer.flush(); + flush(); writer.close(); } - }
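Usage sketch (editorial addition, not part of the change recorded above): the new factory methods replace the Writer-based create() overloads, so a caller now builds a Gson JsonWriter and a JSONCodecFactory and drives the returned NormalizedNodeStreamWriter, typically through a NormalizedNodeWriter. The helper class below, its toJson() name and the JSONCodecFactory.create(schemaContext) call are illustrative assumptions; only createExclusiveWriter()/createNestedWriter() and their parameter order come from the diff itself.

import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactory;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONNormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;

// Hypothetical helper: serialize a single NormalizedNode subtree to a JSON string.
public final class JsonStreamWriterExample {
    private JsonStreamWriterExample() {
    }

    public static String toJson(final SchemaContext schemaContext, final NormalizedNode<?, ?> data)
            throws IOException {
        final StringWriter out = new StringWriter();
        final JsonWriter jsonWriter = new JsonWriter(out);

        // Assumption: the codec factory is created from the SchemaContext; per the
        // javadoc above it can be reused between multiple writers.
        final JSONCodecFactory codecFactory = JSONCodecFactory.create(schemaContext);

        // Exclusive writer: it owns the top-level JSON object and may emit exactly one
        // top-level element; closing it also closes the underlying JsonWriter.
        final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
                codecFactory, SchemaPath.ROOT, null, jsonWriter);

        // NormalizedNodeWriter walks the NormalizedNode tree and replays it as
        // start*/leaf*/endNode events on the stream writer.
        try (NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream)) {
            nodeWriter.write(data);
        }
        return out.toString();
    }
}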
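A companion sketch for the nested variant, under the same assumptions: here the caller owns the enclosing JSON object on the JsonWriter and may emit several top-level elements, as the createNestedWriter() javadoc describes. The method below could sit in the same hypothetical helper class; firstNode and secondNode stand for NormalizedNode instances built elsewhere. The writer is only flushed, not closed, because close() would also close the caller's JsonWriter.

    // Hypothetical companion method: emit two top-level elements into a JSON object
    // that the caller manages on the JsonWriter itself.
    public static void writeNested(final JSONCodecFactory codecFactory, final JsonWriter jsonWriter,
            final NormalizedNode<?, ?> firstNode, final NormalizedNode<?, ?> secondNode) throws IOException {
        jsonWriter.beginObject();
        final NormalizedNodeStreamWriter nested = JSONNormalizedNodeStreamWriter.createNestedWriter(
                codecFactory, SchemaPath.ROOT, null, jsonWriter);
        final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(nested);
        nodeWriter.write(firstNode);
        nodeWriter.write(secondNode);
        // Flush rather than close: the caller still needs the JsonWriter to end the object.
        nodeWriter.flush();
        jsonWriter.endObject();
    }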