X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJSONNormalizedNodeStreamWriter.java;h=67d7e247e32da42404a4e1f6a080b155c5f329a3;hb=57b893fcb28a5955d308da8e4a19785422075ebf;hp=b03771d4c49698c676e1feb2f332a6c4257a29fc;hpb=deeed82678146537e87a6d9c5a5a1c2f21fd57df;p=yangtools.git diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java index b03771d4c4..67283655db 100644 --- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java +++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java @@ -7,345 +7,451 @@ */ package org.opendaylight.yangtools.yang.data.codec.gson; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.gson.stream.JsonWriter; +import static java.util.Objects.requireNonNull; +import static org.w3c.dom.Node.ELEMENT_NODE; +import static org.w3c.dom.Node.TEXT_NODE; +import com.google.gson.stream.JsonWriter; import java.io.IOException; -import java.io.Writer; import java.net.URI; -import java.util.ArrayDeque; -import java.util.Deque; - -import org.opendaylight.yangtools.concepts.Codec; +import java.util.regex.Pattern; +import javax.annotation.RegEx; +import javax.xml.transform.dom.DOMSource; +import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates; import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter; import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker; import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode; +import org.opendaylight.yangtools.yang.model.api.DataNodeContainer; import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode; import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode; -import org.opendaylight.yangtools.yang.model.api.Module; -import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import org.opendaylight.yangtools.yang.model.api.SchemaNode; +import org.opendaylight.yangtools.yang.model.api.SchemaPath; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.w3c.dom.Text; /** * This implementation will create JSON output as output stream. * + *
* Values of leaf and leaf-list are NOT translated according to codecs. - * - * FIXME: rewrite this in terms of {@link JsonWriter}. */ -public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter { - - private static enum NodeType { - OBJECT, - LIST, - OTHER, - } - - private static class TypeInfo { - private boolean hasAtLeastOneChild = false; - private final NodeType type; - private final URI uri; - - public TypeInfo(final NodeType type, final URI uri) { - this.type = type; - this.uri = uri; - } - - public void setHasAtLeastOneChild(final boolean hasChildren) { - this.hasAtLeastOneChild = hasChildren; +public abstract class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter { + private static final class Exclusive extends JSONNormalizedNodeStreamWriter { + Exclusive(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer, + final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); } - public NodeType getType() { - return type; + @Override + public void close() throws IOException { + flush(); + closeWriter(); } + } - public URI getNamespace() { - return uri; + private static final class Nested extends JSONNormalizedNodeStreamWriter { + Nested(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer, + final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); } - public boolean hasAtLeastOneChild() { - return hasAtLeastOneChild; + @Override + public void close() throws IOException { + flush(); + // The caller "owns" the writer, let them close it } } - private final Deque stack = new ArrayDeque<>(); - private final SchemaContext schemaContext; - private final CodecFactory codecs; - private final SchemaTracker tracker; - private final Writer writer; - private final String indent; + /** + * RFC6020 deviation: we are not required to emit empty containers unless they + * are marked as 'presence'. + */ + private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true; - private URI currentNamespace = null; - private int currentDepth = 0; + @RegEx + private static final String NUMBER_STRING = "-?\\d+(\\.\\d+)?"; + private static final Pattern NUMBER_PATTERN = Pattern.compile(NUMBER_STRING); - private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext, - final Writer writer, final int indentSize) { - this.schemaContext = Preconditions.checkNotNull(schemaContext); - this.writer = Preconditions.checkNotNull(writer); + @RegEx + private static final String NOT_DECIMAL_NUMBER_STRING = "-?\\d+"; + private static final Pattern NOT_DECIMAL_NUMBER_PATTERN = Pattern.compile(NOT_DECIMAL_NUMBER_STRING); - Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative"); - if (indentSize != 0) { - indent = Strings.repeat(" ", indentSize); - } else { - indent = null; - } + private final SchemaTracker tracker; + private final JSONCodecFactory codecs; + private final JsonWriter writer; + private JSONStreamWriterContext context; + + JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaTracker tracker, + final JsonWriter writer, final JSONStreamWriterRootContext rootContext) { + this.writer = requireNonNull(writer); + this.codecs = requireNonNull(codecFactory); + this.tracker = requireNonNull(tracker); + this.context = requireNonNull(rootContext); + } + + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. + * + *
+     * The returned writer is the exclusive user of the JsonWriter, which means
+     * it will start the top-level JSON element and end it.
+     *
+     *
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param path Schema Path + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, SchemaTracker.create(codecFactory.getSchemaContext(), path), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); + } - this.codecs = CodecFactory.create(schemaContext); - this.tracker = SchemaTracker.create(schemaContext); + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. + * + *
+     * The returned writer is the exclusive user of the JsonWriter, which means
+     * it will start the top-level JSON element and end it.
+     *
+     *
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param rootNode Root node + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final DataNodeContainer rootNode, final URI initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, SchemaTracker.create(rootNode), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); } /** - * Create a new stream writer, which writes to the specified {@link Writer}. + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. * - * @param schemaContext Schema context - * @param writer Output writer + *
+     * The returned writer can be used to emit multiple top-level elements,
+     * but it does not start or close the parent JSON object; that must be done
+     * by the user providing the {@code jsonWriter} instance in order for the
+     * JSON to be valid. Closing this instance will not close the wrapped
+     * writer; the caller must take care of that.
+     *
+     * @param codecFactory JSON codec factory
+     * @param path Schema Path
+     * @param initialNs Initial namespace
+     * @param jsonWriter JsonWriter
      * @return A stream writer instance
      */
-    public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer) {
-        return new JSONNormalizedNodeStreamWriter(schemaContext, writer, 0);
+    public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory,
+            final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
+        return new Nested(codecFactory, SchemaTracker.create(codecFactory.getSchemaContext(), path), jsonWriter,
+                new JSONStreamWriterSharedRootContext(initialNs));
     }
 
     /**
      * Create a new stream writer, which writes to the specified output stream.
      *
-     * @param schemaContext Schema context
-     * @param writer Output writer
-     * @param indentSize indentation size
+     *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. + * + * @param codecFactory JSON codec factory + * @param rootNode Root node + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer, final int indentSize) { - return new JSONNormalizedNodeStreamWriter(schemaContext, writer, indentSize); + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final DataNodeContainer rootNode, final URI initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, SchemaTracker.create(rootNode), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); } @Override - public void leafNode(final NodeIdentifier name, final Object value) throws IOException { + public final void leafNode(final NodeIdentifier name, final Object value) throws IOException { final LeafSchemaNode schema = tracker.leafNode(name); - final Codec codec = codecs.codecFor(schema.getType()); - - separateElementFromPreviousElement(); - writeJsonIdentifier(name); - currentNamespace = stack.peek().getNamespace(); - writeValue(String.valueOf(codec.serialize(value))); - separateNextSiblingsWithComma(); + final JSONCodec codec = codecs.codecFor(schema); + context.emittingChild(codecs.getSchemaContext(), writer); + context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); + writeValue(value, codec); } @Override - public void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startLeafSet(name); - - separateElementFromPreviousElement(); - stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace())); - writeJsonIdentifier(name); - writeStartList(); - indentRight(); + context = new JSONStreamWriterListContext(context, name); } @Override - public void leafSetEntryNode(final Object value) throws IOException { - final LeafListSchemaNode schema = tracker.leafSetEntryNode(); - final Codec codec = codecs.codecFor(schema.getType()); + public final void leafSetEntryNode(final QName name, final Object value) throws IOException { + final LeafListSchemaNode schema = tracker.leafSetEntryNode(name); + final JSONCodec codec = codecs.codecFor(schema); + context.emittingChild(codecs.getSchemaContext(), writer); + writeValue(value, codec); + } - separateElementFromPreviousElement(); - writeValue(String.valueOf(codec.serialize(value))); - separateNextSiblingsWithComma(); + @Override + public final void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + tracker.startLeafSet(name); + context = new JSONStreamWriterListContext(context, name); } + /* + * Warning suppressed due to static final constant which triggers a warning + * for the call to schema.isPresenceContainer(). 
+ */ + @SuppressWarnings("unused") @Override - public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { - tracker.startContainerNode(name); + public final void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + final SchemaNode schema = tracker.startContainerNode(name); - separateElementFromPreviousElement(); - stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace())); - writeJsonIdentifier(name); - writeStartObject(); - indentRight(); + // FIXME this code ignores presence for containers + // but datastore does as well and it needs be fixed first (2399) + context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); - - separateElementFromPreviousElement(); - stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace())); - writeJsonIdentifier(name); - writeStartList(); - indentRight(); + context = new JSONStreamWriterListContext(context, name); } @Override - public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startListItem(name); - - stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace())); - separateElementFromPreviousElement(); - writeStartObject(); - indentRight(); + context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); - - separateElementFromPreviousElement(); - stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace())); - writeJsonIdentifier(name); - writeStartList(); - indentRight(); + context = new JSONStreamWriterListContext(context, name); } @Override - public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) + public final void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) throws IOException { tracker.startListItem(identifier); - - stack.push(new TypeInfo(NodeType.OBJECT, identifier.getNodeType().getNamespace())); - separateElementFromPreviousElement(); - writeStartObject(); - indentRight(); + context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { - tracker.startListItem(name); - - stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace())); - separateElementFromPreviousElement(); - writeJsonIdentifier(name); - writeStartList(); - indentRight(); + public final void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + tracker.startList(name); + context = new JSONStreamWriterListContext(context, name); } @Override - public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) throws IllegalArgumentException { + public final void startChoiceNode(final NodeIdentifier name, final int 
childSizeHint) { tracker.startChoiceNode(name); - handleInvisibleNode(name.getNodeType().getNamespace()); + context = new JSONStreamWriterInvisibleContext(context); } @Override - public void startAugmentationNode(final AugmentationIdentifier identifier) throws IllegalArgumentException { + public final void startAugmentationNode(final AugmentationIdentifier identifier) { tracker.startAugmentationNode(identifier); - handleInvisibleNode(currentNamespace); + context = new JSONStreamWriterInvisibleContext(context); } @Override - public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException { + public final void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException { + @SuppressWarnings("unused") final AnyXmlSchemaNode schema = tracker.anyxmlNode(name); // FIXME: should have a codec based on this :) - separateElementFromPreviousElement(); - writeJsonIdentifier(name); - currentNamespace = stack.peek().getNamespace(); - writeValue(value.toString()); - separateNextSiblingsWithComma(); + context.emittingChild(codecs.getSchemaContext(), writer); + context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); + + writeAnyXmlValue((DOMSource) value); } @Override - public void endNode() throws IOException { + public final void startYangModeledAnyXmlNode(final NodeIdentifier name, final int childSizeHint) + throws IOException { + tracker.startYangModeledAnyXmlNode(name); + context = new JSONStreamWriterNamedObjectContext(context, name, true); + } + + @Override + public final void endNode() throws IOException { tracker.endNode(); + context = context.endNode(codecs.getSchemaContext(), writer); - final TypeInfo t = stack.pop(); - switch (t.getType()) { - case LIST: - indentLeft(); - newLine(); - writer.append(']'); - break; - case OBJECT: - indentLeft(); - newLine(); - writer.append('}'); - break; - default: - break; + if (context instanceof JSONStreamWriterRootContext) { + context.emitEnd(writer); } + } - currentNamespace = stack.isEmpty() ? null : stack.peek().getNamespace(); - separateNextSiblingsWithComma(); + @Override + public final void flush() throws IOException { + writer.flush(); } - private void separateElementFromPreviousElement() throws IOException { - if (!stack.isEmpty() && stack.peek().hasAtLeastOneChild()) { - writer.append(','); - } - newLine(); + final void closeWriter() throws IOException { + writer.close(); } - private void newLine() throws IOException { - if (indent != null) { - writer.append('\n'); + @SuppressWarnings("unchecked") + private void writeValue(final Object value, final JSONCodec codec) throws IOException { + ((JSONCodec) codec).writeValue(writer, value); + } - for (int i = 0; i < currentDepth; i++) { - writer.append(indent); - } - } + private void writeAnyXmlValue(final DOMSource anyXmlValue) throws IOException { + writeXmlNode(anyXmlValue.getNode()); } - private void separateNextSiblingsWithComma() { - if (!stack.isEmpty()) { - stack.peek().setHasAtLeastOneChild(true); + private void writeXmlNode(final Node node) throws IOException { + if (isArrayElement(node)) { + writeArrayContent(node); + return; + } + final Element firstChildElement = getFirstChildElement(node); + if (firstChildElement == null) { + writeXmlValue(node); + } else { + writeObjectContent(firstChildElement); } } - /** - * Invisible nodes have to be also pushed to stack because of pairing of start*() and endNode() methods. Information - * about child existing (due to printing comma) has to be transfered to invisible node. 
- */ - private void handleInvisibleNode(final URI uri) { - TypeInfo typeInfo = new TypeInfo(NodeType.OTHER, uri); - typeInfo.setHasAtLeastOneChild(stack.peek().hasAtLeastOneChild()); - stack.push(typeInfo); + private void writeArrayContent(final Node node) throws IOException { + writer.beginArray(); + handleArray(node); + writer.endArray(); } - private void writeStartObject() throws IOException { - writer.append('{'); + private void writeObjectContent(final Element firstChildElement) throws IOException { + writer.beginObject(); + writeObject(firstChildElement); + writer.endObject(); } - private void writeStartList() throws IOException { - writer.append('['); + private static boolean isArrayElement(final Node node) { + if (ELEMENT_NODE == node.getNodeType()) { + final String nodeName = node.getNodeName(); + for (Node nextNode = node.getNextSibling(); nextNode != null; nextNode = nextNode.getNextSibling()) { + if (ELEMENT_NODE == nextNode.getNodeType() && nodeName.equals(nextNode.getNodeName())) { + return true; + } + } + } + return false; } - private void writeModulName(final URI namespace) throws IOException { - if (this.currentNamespace == null || namespace != this.currentNamespace) { - Module module = schemaContext.findModuleByNamespaceAndRevision(namespace, null); - writer.append(module.getName()); - writer.append(':'); - currentNamespace = namespace; + private void handleArray(final Node node) throws IOException { + final Element parentNode = (Element)node.getParentNode(); + final NodeList elementsList = parentNode.getElementsByTagName(node.getNodeName()); + for (int i = 0, length = elementsList.getLength(); i < length; i++) { + final Node arrayElement = elementsList.item(i); + final Element parent = (Element)arrayElement.getParentNode(); + if (parentNode.isSameNode(parent)) { + final Element firstChildElement = getFirstChildElement(arrayElement); + if (firstChildElement != null) { + writeObjectContent(firstChildElement); + } else { + // It may be scalar + writeXmlValue(arrayElement); + } + } } } - private void writeValue(final String value) throws IOException { - writer.append('"'); - writer.append(value); - writer.append('"'); + private void writeObject(Node node) throws IOException { + String previousNodeName = ""; + while (node != null) { + if (ELEMENT_NODE == node.getNodeType()) { + if (!node.getNodeName().equals(previousNodeName)) { + previousNodeName = node.getNodeName(); + writer.name(node.getNodeName()); + writeXmlNode(node); + } + } + node = node.getNextSibling(); + } } - private void writeJsonIdentifier(final NodeIdentifier name) throws IOException { - writer.append('"'); - writeModulName(name.getNodeType().getNamespace()); - writer.append(name.getNodeType().getLocalName()); - writer.append("\":"); - } + private void writeXmlValue(final Node node) throws IOException { + Text firstChild = getFirstChildText(node); + String childNodeText = firstChild != null ? firstChild.getWholeText() : ""; + childNodeText = childNodeText != null ? 
childNodeText.trim() : ""; - private void indentRight() { - currentDepth++; + if (NUMBER_PATTERN.matcher(childNodeText).matches()) { + writer.value(parseNumber(childNodeText)); + return; + } + switch (childNodeText) { + case "null": + writer.nullValue(); + break; + case "false": + writer.value(false); + break; + case "true": + writer.value(true); + break; + default: + writer.value(childNodeText); + } } - private void indentLeft() { - currentDepth--; + private static Element getFirstChildElement(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (ELEMENT_NODE == childNode.getNodeType()) { + return (Element) childNode; + } + } + return null; } - @Override - public void flush() throws IOException { - writer.flush(); + private static Text getFirstChildText(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (TEXT_NODE == childNode.getNodeType()) { + return (Text) childNode; + } + } + return null; } - @Override - public void close() throws IOException { - writer.flush(); - writer.close(); - } + // json numbers are 64 bit wide floating point numbers - in java terms it is either long or double + private static Number parseNumber(final String numberText) { + if (NOT_DECIMAL_NUMBER_PATTERN.matcher(numberText).matches()) { + return Long.valueOf(numberText); + } + return Double.valueOf(numberText); + } }
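
The anyxml handling added in this change is purely syntactic: writeAnyXmlValue() walks the DOMSource without consulting the schema. Repeated sibling elements with the same name become a JSON array, text matching -?\d+(\.\d+)? becomes a JSON number (emitted as a long when it has no fraction digits and as a double otherwise, since JSON numbers map to one of those two Java types), the literals "true", "false" and "null" become the corresponding JSON tokens, and any other text is emitted as a string. As a made-up illustration (the element names are not taken from any real model), an anyxml body of <cont><item>1</item><item>2</item><flag>true</flag><name>foo</name></cont> would come out roughly as {"item": [1, 2], "flag": true, "name": "foo"}.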
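
To show how the two ownership models behind the new factory methods are meant to be driven, a minimal sketch follows. It is not part of the patch: the method names writeExclusive/writeNested are invented, passing null as the initial namespace and using NormalizedNodeWriter to drive the stream writer are assumptions about the surrounding API rather than code from this change.

    // Minimal sketch, assuming a JSONCodecFactory, a SchemaPath and a
    // NormalizedNode<?, ?> are already available; imports follow those in the patch.

    // Exclusive: the stream writer owns the JsonWriter, emits a single top-level
    // element and closes the JsonWriter when it is itself closed.
    static void writeExclusive(final JSONCodecFactory codecFactory, final SchemaPath path,
            final NormalizedNode<?, ?> data, final Writer out) throws IOException {
        final JsonWriter jsonWriter = new JsonWriter(out);
        final NormalizedNodeStreamWriter stream = JSONNormalizedNodeStreamWriter
                .createExclusiveWriter(codecFactory, path, null, jsonWriter);
        try (NormalizedNodeWriter nnWriter = NormalizedNodeWriter.forStreamWriter(stream)) {
            nnWriter.write(data).flush();
        }
    }

    // Nested: the caller owns the JsonWriter and the enclosing JSON object, so several
    // top-level children may be emitted and the JsonWriter is left open afterwards.
    static void writeNested(final JSONCodecFactory codecFactory, final SchemaPath path,
            final NormalizedNode<?, ?> data, final JsonWriter jsonWriter) throws IOException {
        jsonWriter.beginObject();
        final NormalizedNodeStreamWriter stream = JSONNormalizedNodeStreamWriter
                .createNestedWriter(codecFactory, path, null, jsonWriter);
        try (NormalizedNodeWriter nnWriter = NormalizedNodeWriter.forStreamWriter(stream)) {
            nnWriter.write(data).flush();
        }
        jsonWriter.endObject();
    }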