X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJSONNormalizedNodeStreamWriter.java;h=e5dc228b334a3dd88802f2a74d56cbb5c916a652;hb=f7fe89b85f540dbe7d1fc051f2082f2ce571321a;hp=a0488ee6ae8f75e3d0441e714e9d1d5dfb533d9f;hpb=05dec17ecf753541133f8a852f5e16b157efef87;p=yangtools.git

diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
index a0488ee6ae..e5dc228b33 100644
--- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
+++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java
@@ -8,23 +8,19 @@ package org.opendaylight.yangtools.yang.data.codec.gson;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.gson.stream.JsonWriter;
-
 import java.io.IOException;
-import java.io.Writer;
 import java.net.URI;
-
+import org.opendaylight.yangtools.yang.common.QName;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
 import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
 import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker;
 import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
-import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaPath;
 
 /**
@@ -32,100 +28,74 @@ import org.opendaylight.yangtools.yang.model.api.SchemaPath;
  *
  * Values of leaf and leaf-list are NOT translated according to codecs.
  *
- * FIXME: rewrite this in terms of {@link JsonWriter}.
  */
-public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter {
+public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter {
     /**
      * RFC6020 deviation: we are not required to emit empty containers unless they
      * are marked as 'presence'.
      */
     private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true;
 
-    private final SchemaContext schemaContext;
     private final SchemaTracker tracker;
-    private final CodecFactory codecs;
-    private final Writer writer;
-    private final String indent;
+    private final JSONCodecFactory codecs;
+    private final JsonWriter writer;
     private JSONStreamWriterContext context;
 
-    private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext,
-            final Writer writer, final int indentSize) {
-        this(schemaContext, SchemaPath.ROOT, writer, null, indentSize);
-    }
-
-    private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext, final SchemaPath path,
-            final Writer writer, final URI initialNs, final int indentSize) {
-        this.schemaContext = Preconditions.checkNotNull(schemaContext);
-        this.writer = Preconditions.checkNotNull(writer);
-
-        Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative");
-        if (indentSize != 0) {
-            indent = Strings.repeat(" ", indentSize);
-        } else {
-            indent = null;
-        }
-        this.codecs = CodecFactory.create(schemaContext);
-        this.tracker = SchemaTracker.create(schemaContext, path);
-        this.context = new JSONStreamWriterRootContext(initialNs);
+    private JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final JsonWriter JsonWriter, final JSONStreamWriterRootContext rootContext) {
+        this.writer = Preconditions.checkNotNull(JsonWriter);
+        this.codecs = Preconditions.checkNotNull(codecFactory);
+        this.tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path);
+        this.context = Preconditions.checkNotNull(rootContext);
     }
 
     /**
-     * Create a new stream writer, which writes to the specified {@link Writer}.
+     * Create a new stream writer, which writes to the specified output stream.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @return A stream writer instance
-     */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, writer, 0);
-    }
-
-    /**
-    * Create a new stream writer, which writes to the specified {@link Writer}.
+     * The codec factory can be reused between multiple writers.
      *
-     * @param schemaContext Schema context
-     * @param path Root schemapath
-     * @param writer Output writer
-     * @return A stream writer instance
-     */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, null, 0);
-    }
-
-    /**
-     * Create a new stream writer, which writes to the specified {@link Writer}.
+     * Returned writer is exclusive user of JsonWriter, which means it will start
+     * top-level JSON element and ends it.
+     *
+     * This instance of writer can be used only to emit one top level element,
+     * otherwise it will produce incorrect JSON.
      *
-     * @param schemaContext Schema context
-     * @param path Root schemapath
-     * @param writer Output writer
+     * @param codecFactory JSON codec factory
+     * @param path Schema Path
      * @param initialNs Initial namespace
+     * @param jsonWriter JsonWriter
      * @return A stream writer instance
      */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,
-            final URI initialNs, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, initialNs, 0);
+    public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
+        return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterExclusiveRootContext(initialNs));
     }
 
     /**
      * Create a new stream writer, which writes to the specified output stream.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @param indentSize indentation size
+     * The codec factory can be reused between multiple writers.
+     *
+     * Returned writer can be used emit multiple top level element,
+     * but does not start / close parent JSON object, which must be done
+     * by user providing {@code jsonWriter} instance in order for
+     * JSON to be valid.
+     *
+     * @param codecFactory JSON codec factory
+     * @param path Schema Path
+     * @param initialNs Initial namespace
+     * @param jsonWriter JsonWriter
      * @return A stream writer instance
      */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer, final int indentSize) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, writer, indentSize);
+    public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
+        return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterSharedRootContext(initialNs));
     }
 
     @Override
     public void leafNode(final NodeIdentifier name, final Object value) throws IOException {
         final LeafSchemaNode schema = tracker.leafNode(name);
-        final JSONCodec codec = codecs.codecFor(schema.getType());
-
-        context.emittingChild(schemaContext, writer, indent);
-        context.writeJsonIdentifier(schemaContext, writer, name.getNodeType());
-        writeValue(codec.serialize(value), codec.needQuotes());
+        final JSONCodec codec = codecs.codecFor(schema);
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
+        writeValue(value, codec);
     }
 
     @Override
@@ -135,12 +105,17 @@ public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWrite
     }
 
     @Override
-    public void leafSetEntryNode(final Object value) throws IOException {
+    public void leafSetEntryNode(final QName name, final Object value) throws IOException {
         final LeafListSchemaNode schema = tracker.leafSetEntryNode();
-        final JSONCodec codec = codecs.codecFor(schema.getType());
+        final JSONCodec codec = codecs.codecFor(schema);
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        writeValue(value, codec);
+    }
 
-        context.emittingChild(schemaContext, writer, indent);
-        writeValue(codec.serialize(value), codec.needQuotes());
+    @Override
+    public void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
+        tracker.startLeafSet(name);
+        context = new JSONStreamWriterListContext(context, name);
     }
 
     /*
@@ -150,8 +125,11 @@ public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWrite
     @SuppressWarnings("unused")
     @Override
     public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
-        final ContainerSchemaNode schema = tracker.startContainerNode(name);
-        context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS || schema.isPresenceContainer());
+        final SchemaNode schema = tracker.startContainerNode(name);
+
+        // FIXME this code ignores presence for containers
+        // but datastore does as well and it needs be fixed first (2399)
+        context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
     }
 
     @Override
@@ -199,30 +177,37 @@ public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWrite
 
     @Override
     public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException {
+        @SuppressWarnings("unused")
         final AnyXmlSchemaNode schema = tracker.anyxmlNode(name);
         // FIXME: should have a codec based on this :)
 
-        context.emittingChild(schemaContext, writer, indent);
-        context.writeJsonIdentifier(schemaContext, writer, name.getNodeType());
-        writeValue(String.valueOf(value), true);
+        context.emittingChild(codecs.getSchemaContext(), writer);
+        context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
+        // FIXME this kind of serialization is incorrect since the value for AnyXml is now a DOMSource
+        writer.value(String.valueOf(value));
+    }
+
+    @Override
+    public void startYangModeledAnyXmlNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
+        tracker.startYangModeledAnyXmlNode(name);
+        context = new JSONStreamWriterNamedObjectContext(context, name, true);
    }
 
     @Override
     public void endNode() throws IOException {
         tracker.endNode();
-        context = context.endNode(schemaContext, writer, indent);
-    }
+        context = context.endNode(codecs.getSchemaContext(), writer);
 
-    private void writeValue(final String str, final boolean needQuotes) throws IOException {
-        if (needQuotes) {
-            writer.append('"');
-            writer.append(str);
-            writer.append('"');
-        } else {
-            writer.append(str);
+        if(context instanceof JSONStreamWriterRootContext) {
+            context.emitEnd(writer);
         }
     }
 
+    private void writeValue(final Object value, final JSONCodec codec)
+            throws IOException {
+        codec.serializeToWriter(writer,value);
+    }
+
     @Override
     public void flush() throws IOException {
         writer.flush();
@@ -230,8 +215,7 @@ public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWrite
 
     @Override
     public void close() throws IOException {
-        writer.flush();
+        flush();
        writer.close();
     }
-
 }
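
For readers of this change, the sketch below shows how the two new factory methods might be wired to a Gson JsonWriter, illustrating the contract spelled out in their javadoc: the exclusive writer owns the JsonWriter and emits the single enclosing top-level element itself, while the nested writer leaves the enclosing JSON object to the caller. This is an illustrative sketch only, not part of the patch; the helper class, the JSONCodecFactory.create(schemaContext) call and the null initial namespace are assumptions, and only createExclusiveWriter(...) and createNestedWriter(...) come from the code above.

    // Illustrative usage sketch -- not part of the patch above.
    import java.io.IOException;
    import java.io.StringWriter;
    import com.google.gson.stream.JsonWriter;
    import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
    import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactory;
    import org.opendaylight.yangtools.yang.data.codec.gson.JSONNormalizedNodeStreamWriter;
    import org.opendaylight.yangtools.yang.model.api.SchemaContext;
    import org.opendaylight.yangtools.yang.model.api.SchemaPath;

    final class JsonStreamWriterSketch {
        private JsonStreamWriterSketch() {
        }

        // Exclusive variant: the returned writer is the sole user of jsonWriter and
        // emits the enclosing top-level JSON element itself, so it may serialize
        // only one top-level element.
        static NormalizedNodeStreamWriter exclusive(final SchemaContext schemaContext, final JsonWriter jsonWriter) {
            // JSONCodecFactory.create(...) is assumed here; the patch only requires
            // that some JSONCodecFactory instance be supplied.
            final JSONCodecFactory codecs = JSONCodecFactory.create(schemaContext);
            // null initial namespace used for brevity.
            return JSONNormalizedNodeStreamWriter.createExclusiveWriter(codecs, SchemaPath.ROOT, null, jsonWriter);
        }

        // Nested variant: the caller opens and closes the parent JSON object and may
        // emit several top-level elements through the returned writer.
        static String nestedSkeleton(final SchemaContext schemaContext) throws IOException {
            final JSONCodecFactory codecs = JSONCodecFactory.create(schemaContext);
            final StringWriter out = new StringWriter();
            final JsonWriter jsonWriter = new JsonWriter(out);

            jsonWriter.beginObject();
            final NormalizedNodeStreamWriter nested =
                    JSONNormalizedNodeStreamWriter.createNestedWriter(codecs, SchemaPath.ROOT, null, jsonWriter);
            // ... emit one or more top-level nodes through 'nested' here ...
            nested.flush();
            jsonWriter.endObject();
            jsonWriter.flush();
            return out.toString();
        }
    }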