X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJSONNormalizedNodeStreamWriter.java;h=67e4c1f1b045225fe549b127a5b30f83a710aff1;hb=refs%2Fchanges%2F76%2F95176%2F5;hp=2a1f8e1c990b259aee98fa0c1bc013ce4ed60ebf;hpb=5babaa4fc00be0a587748c17b11f523ba8d61498;p=yangtools.git diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java index 2a1f8e1c99..67e4c1f1b0 100644 --- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java +++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java @@ -7,75 +7,119 @@ */ package org.opendaylight.yangtools.yang.data.codec.gson; -import static org.opendaylight.yangtools.yang.data.codec.gson.JsonParserStream.ANYXML_ARRAY_ELEMENT_ID; -import static org.w3c.dom.Node.ELEMENT_NODE; -import static org.w3c.dom.Node.TEXT_NODE; +import static com.google.common.base.Preconditions.checkState; +import static java.util.Objects.requireNonNull; -import com.google.common.base.Preconditions; +import com.google.common.collect.ClassToInstanceMap; +import com.google.common.collect.ImmutableClassToInstanceMap; import com.google.gson.stream.JsonWriter; import java.io.IOException; -import java.net.URI; -import java.util.regex.Matcher; +import java.util.NoSuchElementException; import java.util.regex.Pattern; -import javax.annotation.RegEx; import javax.xml.transform.dom.DOMSource; -import org.opendaylight.yangtools.yang.common.QName; +import org.checkerframework.checker.regex.qual.Regex; +import org.opendaylight.yangtools.rfc8528.data.api.MountPointContext; +import org.opendaylight.yangtools.rfc8528.data.api.MountPointIdentifier; +import org.opendaylight.yangtools.rfc8528.data.api.StreamWriterMountPointExtension; +import org.opendaylight.yangtools.yang.common.XMLNamespace; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates; +import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue; +import org.opendaylight.yangtools.yang.data.api.schema.NormalizedAnydata; import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter; -import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker; -import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode; -import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode; -import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode; +import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriterExtension; +import org.opendaylight.yangtools.yang.data.util.NormalizedNodeStreamWriterStack; +import org.opendaylight.yangtools.yang.model.api.AnydataSchemaNode; +import org.opendaylight.yangtools.yang.model.api.AnyxmlSchemaNode; +import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode; +import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext; +import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference; import 
org.opendaylight.yangtools.yang.model.api.SchemaNode; import org.opendaylight.yangtools.yang.model.api.SchemaPath; +import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode; +import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier.Absolute; +import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack; +import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; +import org.w3c.dom.Text; /** * This implementation will create JSON output as output stream. * + *
<p>
* Values of leaf and leaf-list are NOT translated according to codecs. - * */ -public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter { +public abstract class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter, + StreamWriterMountPointExtension { + private static final class Exclusive extends JSONNormalizedNodeStreamWriter { + Exclusive(final JSONCodecFactory codecFactory, final NormalizedNodeStreamWriterStack tracker, + final JsonWriter writer, final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); + } + + @Override + public void close() throws IOException { + flush(); + closeWriter(); + } + } + + private static final class Nested extends JSONNormalizedNodeStreamWriter { + Nested(final JSONCodecFactory codecFactory, final NormalizedNodeStreamWriterStack tracker, + final JsonWriter writer, final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); + } + + @Override + public void close() throws IOException { + flush(); + // The caller "owns" the writer, let them close it + } + } + /** * RFC6020 deviation: we are not required to emit empty containers unless they * are marked as 'presence'. */ private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true; - @RegEx + @Regex private static final String NUMBER_STRING = "-?\\d+(\\.\\d+)?"; private static final Pattern NUMBER_PATTERN = Pattern.compile(NUMBER_STRING); - @RegEx + @Regex private static final String NOT_DECIMAL_NUMBER_STRING = "-?\\d+"; private static final Pattern NOT_DECIMAL_NUMBER_PATTERN = Pattern.compile(NOT_DECIMAL_NUMBER_STRING); - private final SchemaTracker tracker; + private final NormalizedNodeStreamWriterStack tracker; private final JSONCodecFactory codecs; private final JsonWriter writer; private JSONStreamWriterContext context; - private JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final JsonWriter JsonWriter, final JSONStreamWriterRootContext rootContext) { - this.writer = Preconditions.checkNotNull(JsonWriter); - this.codecs = Preconditions.checkNotNull(codecFactory); - this.tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path); - this.context = Preconditions.checkNotNull(rootContext); + JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final NormalizedNodeStreamWriterStack tracker, + final JsonWriter writer, final JSONStreamWriterRootContext rootContext) { + this.writer = requireNonNull(writer); + this.codecs = requireNonNull(codecFactory); + this.tracker = requireNonNull(tracker); + this.context = requireNonNull(rootContext); } /** * Create a new stream writer, which writes to the specified output stream. * + *
<p>
* The codec factory can be reused between multiple writers. * + *
<p>
* Returned writer is exclusive user of JsonWriter, which means it will start * top-level JSON element and ends it. * + *
<p>
* This instance of writer can be used only to emit one top level element, - * otherwise it will produce incorrect JSON. + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. * * @param codecFactory JSON codec factory * @param path Schema Path @@ -83,19 +127,106 @@ public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStrea * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) { - return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterExclusiveRootContext(initialNs)); + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final SchemaPath path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, + NormalizedNodeStreamWriterStack.of(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); } /** * Create a new stream writer, which writes to the specified output stream. * + *
<p>
* The codec factory can be reused between multiple writers. * + *
<p>
+ * Returned writer is exclusive user of JsonWriter, which means it will start + * top-level JSON element and ends it. + * + *
<p>
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param rootNode Root node inference + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final EffectiveStatementInference rootNode, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, NormalizedNodeStreamWriterStack.of(rootNode), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); + } + + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
<p>
+ * The codec factory can be reused between multiple writers. + * + *
<p>
+ * Returned writer is exclusive user of JsonWriter, which means it will start + * top-level JSON element and ends it. + * + *
<p>
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param path Schema Path + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final Absolute path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, + NormalizedNodeStreamWriterStack.of(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); + } + + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
<p>
+ * The codec factory can be reused between multiple writers. + * + *
<p>
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. + * + * @param codecFactory JSON codec factory + * @param path Schema Path + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final SchemaPath path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, + NormalizedNodeStreamWriterStack.of(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); + } + + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
<p>
+ * The codec factory can be reused between multiple writers. + * + *
<p>
* Returned writer can be used emit multiple top level element, * but does not start / close parent JSON object, which must be done * by user providing {@code jsonWriter} instance in order for - * JSON to be valid. + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. * * @param codecFactory JSON codec factory * @param path Schema Path @@ -103,35 +234,64 @@ public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStrea * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) { - return new JSONNormalizedNodeStreamWriter(codecFactory, path, jsonWriter, new JSONStreamWriterSharedRootContext(initialNs)); + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final Absolute path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, + NormalizedNodeStreamWriterStack.of(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); + } + + /** + * Create a new stream writer, which writes to the specified output stream. + * + *
<p>
+ * The codec factory can be reused between multiple writers. + * + *
<p>
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. + * + * @param codecFactory JSON codec factory + * @param rootNode Root node inference + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter + * @return A stream writer instance + */ + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final EffectiveStatementInference rootNode, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, NormalizedNodeStreamWriterStack.of(rootNode), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); + } + + @Override + public ClassToInstanceMap getExtensions() { + return ImmutableClassToInstanceMap.of(StreamWriterMountPointExtension.class, this); } @Override - public void leafNode(final NodeIdentifier name, final Object value) throws IOException { - final LeafSchemaNode schema = tracker.leafNode(name); - final JSONCodec codec = codecs.codecFor(schema); - context.emittingChild(codecs.getSchemaContext(), writer); - context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); - writeValue(value, codec); + public void startLeafNode(final NodeIdentifier name) throws IOException { + tracker.startLeafNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); } @Override - public void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startLeafSet(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void leafSetEntryNode(final QName name, final Object value) throws IOException { - final LeafListSchemaNode schema = tracker.leafSetEntryNode(name); - final JSONCodec codec = codecs.codecFor(schema); - context.emittingChild(codecs.getSchemaContext(), writer); - writeValue(value, codec); + public void startLeafSetEntryNode(final NodeWithValue name) throws IOException { + tracker.startLeafSetEntryNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); } @Override - public void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startLeafSet(name); context = new JSONStreamWriterListContext(context, name); } @@ -140,195 +300,281 @@ public final class JSONNormalizedNodeStreamWriter implements NormalizedNodeStrea * Warning suppressed due to static final constant which triggers a warning * for the call to schema.isPresenceContainer(). 
*/ - @SuppressWarnings("unused") @Override - public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { final SchemaNode schema = tracker.startContainerNode(name); - - // FIXME this code ignores presence for containers - // but datastore does as well and it needs be fixed first (2399) - context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); + final boolean isPresence = schema instanceof ContainerSchemaNode + ? ((ContainerSchemaNode) schema).isPresenceContainer() : DEFAULT_EMIT_EMPTY_CONTAINERS; + context = new JSONStreamWriterNamedObjectContext(context, name, isPresence); } @Override - public void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startListItem(name); context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) + public final void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) throws IOException { tracker.startListItem(identifier); context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) { + public final void startChoiceNode(final NodeIdentifier name, final int childSizeHint) { tracker.startChoiceNode(name); context = new JSONStreamWriterInvisibleContext(context); } @Override - public void startAugmentationNode(final AugmentationIdentifier identifier) { + public final void startAugmentationNode(final AugmentationIdentifier identifier) { tracker.startAugmentationNode(identifier); context = new JSONStreamWriterInvisibleContext(context); } @Override - public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException { - @SuppressWarnings("unused") - final AnyXmlSchemaNode schema = tracker.anyxmlNode(name); - // FIXME: should have a codec based on this :) + public final boolean startAnydataNode(final NodeIdentifier name, final Class objectModel) throws IOException { + if (NormalizedAnydata.class.isAssignableFrom(objectModel)) { + tracker.startAnydataNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + 
context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); + return true; + } - context.emittingChild(codecs.getSchemaContext(), writer); - context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); + return false; + } - writeAnyXmlValue((DOMSource) value); + @Override + public final NormalizedNodeStreamWriter startMountPoint(final MountPointIdentifier mountId, + final MountPointContext mountCtx) throws IOException { + final EffectiveModelContext ctx = mountCtx.getEffectiveModelContext(); + return new Nested(codecs.rebaseTo(ctx), NormalizedNodeStreamWriterStack.of(ctx), writer, + new JSONStreamWriterSharedRootContext(context.getNamespace())); } @Override - public void startYangModeledAnyXmlNode(final NodeIdentifier name, final int childSizeHint) throws IOException { - tracker.startYangModeledAnyXmlNode(name); - context = new JSONStreamWriterNamedObjectContext(context, name, true); + public final boolean startAnyxmlNode(final NodeIdentifier name, final Class objectModel) throws IOException { + if (DOMSource.class.isAssignableFrom(objectModel)) { + tracker.startAnyxmlNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); + return true; + } + return false; } @Override - public void endNode() throws IOException { + public final void endNode() throws IOException { tracker.endNode(); - context = context.endNode(codecs.getSchemaContext(), writer); + context = context.endNode(codecs.getEffectiveModelContext(), writer); + } + + @Override + public final void flush() throws IOException { + writer.flush(); + } - if (context instanceof JSONStreamWriterRootContext) { - context.emitEnd(writer); + final void closeWriter() throws IOException { + if (!(context instanceof JSONStreamWriterRootContext)) { + throw new IOException("Unexpected root context " + context); } + + context.endNode(codecs.getEffectiveModelContext(), writer); + writer.close(); } - private void writeValue(final Object value, final JSONCodec codec) - throws IOException { - ((JSONCodec) codec).serializeToWriter(writer, value); + @Override + public void scalarValue(final Object value) throws IOException { + final Object current = tracker.getParent(); + if (current instanceof TypedDataSchemaNode) { + writeValue(value, codecs.codecFor((TypedDataSchemaNode) current)); + } else if (current instanceof AnydataSchemaNode) { + writeAnydataValue(value); + } else { + throw new IllegalStateException(String.format("Cannot emit scalar %s for %s", value, current)); + } + } + + @Override + public void domSourceValue(final DOMSource value) throws IOException { + final Object current = tracker.getParent(); + checkState(current instanceof AnyxmlSchemaNode, "Cannot emit DOMSource %s for %s", value, current); + // FIXME: should have a codec based on this :) + writeAnyXmlValue(value); + } + + @SuppressWarnings("unchecked") + private void writeValue(final Object value, final JSONCodec codec) throws IOException { + ((JSONCodec) codec).writeValue(writer, value); + } + + private void writeAnydataValue(final Object value) throws IOException { + if (value instanceof NormalizedAnydata) { + writeNormalizedAnydata((NormalizedAnydata) value); + } else { + throw new IllegalStateException("Unexpected anydata value " + value); + } + } + + private void writeNormalizedAnydata(final NormalizedAnydata anydata) throws IOException { + // Adjust state to point to parent node and ensure it can 
handle data tree nodes + final SchemaInferenceStack.Inference inference; + try { + final SchemaInferenceStack stack = SchemaInferenceStack.ofInference(anydata.getInference()); + stack.exitToDataTree(); + inference = stack.toInference(); + } catch (IllegalArgumentException | IllegalStateException | NoSuchElementException e) { + throw new IOException("Cannot emit " + anydata, e); + } + + anydata.writeTo(JSONNormalizedNodeStreamWriter.createNestedWriter( + codecs.rebaseTo(inference.getEffectiveModelContext()), inference, context.getNamespace(), writer)); } private void writeAnyXmlValue(final DOMSource anyXmlValue) throws IOException { - final Node documentNode = anyXmlValue.getNode(); - final Node firstChild = documentNode.getFirstChild(); - if (ELEMENT_NODE == firstChild.getNodeType() && !ANYXML_ARRAY_ELEMENT_ID.equals(firstChild.getNodeName())) { - writer.beginObject(); - traverseAnyXmlValue(documentNode); - writer.endObject(); + writeXmlNode(anyXmlValue.getNode()); + } + + private void writeXmlNode(final Node node) throws IOException { + if (isArrayElement(node)) { + writeArrayContent(node); + return; + } + final Element firstChildElement = getFirstChildElement(node); + if (firstChildElement == null) { + writeXmlValue(node); } else { - traverseAnyXmlValue(documentNode); + writeObjectContent(firstChildElement); } } - private void traverseAnyXmlValue(final Node node) throws IOException { - final NodeList children = node.getChildNodes(); - boolean inArray = false; + private void writeArrayContent(final Node node) throws IOException { + writer.beginArray(); + handleArray(node); + writer.endArray(); + } - for (int i = 0, length = children.getLength(); i < length; i++) { - final Node childNode = children.item(i); - boolean inObject = false; - - if (ELEMENT_NODE == childNode.getNodeType()) { - final Node firstChild = childNode.getFirstChild(); - // beginning of an array - if (ANYXML_ARRAY_ELEMENT_ID.equals(childNode.getNodeName()) && !inArray) { - writer.beginArray(); - inArray = true; - // object at the beginning of the array - if (isJsonObjectInArray(childNode, firstChild)) { - writer.beginObject(); - inObject = true; - } - // object in the array - } else if (isJsonObjectInArray(childNode, firstChild)) { - writer.beginObject(); - inObject = true; - // object - } else if (isJsonObject(firstChild)) { - writer.name(childNode.getNodeName()); - writer.beginObject(); - inObject = true; - // name - } else if (!inArray){ - writer.name(childNode.getNodeName()); + private void writeObjectContent(final Element firstChildElement) throws IOException { + writer.beginObject(); + writeObject(firstChildElement); + writer.endObject(); + } + + private static boolean isArrayElement(final Node node) { + if (Node.ELEMENT_NODE == node.getNodeType()) { + final String nodeName = node.getNodeName(); + for (Node nextNode = node.getNextSibling(); nextNode != null; nextNode = nextNode.getNextSibling()) { + if (Node.ELEMENT_NODE == nextNode.getNodeType() && nodeName.equals(nextNode.getNodeName())) { + return true; } } + } + return false; + } - // text value, i.e. 
a number, string, boolean or null - if (TEXT_NODE == childNode.getNodeType()) { - final String childNodeText = childNode.getNodeValue(); - final Matcher matcher = NUMBER_PATTERN.matcher(childNodeText); - if (matcher.matches()) { - writer.value(parseNumber(childNodeText)); - } else if ("true".equals(childNodeText) || "false".equals(childNodeText)) { - writer.value(Boolean.parseBoolean(childNodeText)); - } else if ("null".equals(childNodeText)) { - writer.nullValue(); + private void handleArray(final Node node) throws IOException { + final Element parentNode = (Element)node.getParentNode(); + final NodeList elementsList = parentNode.getElementsByTagName(node.getNodeName()); + for (int i = 0, length = elementsList.getLength(); i < length; i++) { + final Node arrayElement = elementsList.item(i); + final Element parent = (Element)arrayElement.getParentNode(); + if (parentNode.isSameNode(parent)) { + final Element firstChildElement = getFirstChildElement(arrayElement); + if (firstChildElement != null) { + writeObjectContent(firstChildElement); } else { - writer.value(childNodeText); + // It may be scalar + writeXmlValue(arrayElement); } - - return; - } - - traverseAnyXmlValue(childNode); - - if (inObject) { - writer.endObject(); } } + } - if (inArray) { - writer.endArray(); + private void writeObject(Node node) throws IOException { + String previousNodeName = ""; + while (node != null) { + if (Node.ELEMENT_NODE == node.getNodeType()) { + if (!node.getNodeName().equals(previousNodeName)) { + previousNodeName = node.getNodeName(); + writer.name(node.getNodeName()); + writeXmlNode(node); + } + } + node = node.getNextSibling(); } } - // json numbers are 64 bit wide floating point numbers - in java terms it is either long or double - private static Number parseNumber(final String numberText) { - Matcher matcher = NOT_DECIMAL_NUMBER_PATTERN.matcher(numberText); - if (matcher.matches()) { - return Long.parseLong(numberText); - } else { - return Double.parseDouble(numberText); + private void writeXmlValue(final Node node) throws IOException { + Text firstChild = getFirstChildText(node); + String childNodeText = firstChild != null ? firstChild.getWholeText() : ""; + childNodeText = childNodeText != null ? 
childNodeText.trim() : ""; + + if (NUMBER_PATTERN.matcher(childNodeText).matches()) { + writer.value(parseNumber(childNodeText)); + return; + } + switch (childNodeText) { + case "null": + writer.nullValue(); + break; + case "false": + writer.value(false); + break; + case "true": + writer.value(true); + break; + default: + writer.value(childNodeText); } } - private static boolean isJsonObject(final Node firstChild) { - return !ANYXML_ARRAY_ELEMENT_ID.equals(firstChild.getNodeName()) && TEXT_NODE != firstChild.getNodeType(); + private static Element getFirstChildElement(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (Node.ELEMENT_NODE == childNode.getNodeType()) { + return (Element) childNode; + } + } + return null; } - private static boolean isJsonObjectInArray(final Node node, final Node firstChild) { - return ANYXML_ARRAY_ELEMENT_ID.equals(node.getNodeName()) - && !ANYXML_ARRAY_ELEMENT_ID.equals(firstChild.getNodeName()) - && TEXT_NODE != firstChild.getNodeType(); + private static Text getFirstChildText(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (Node.TEXT_NODE == childNode.getNodeType()) { + return (Text) childNode; + } + } + return null; } - @Override - public void flush() throws IOException { - writer.flush(); - } + // json numbers are 64 bit wide floating point numbers - in java terms it is either long or double + private static Number parseNumber(final String numberText) { + if (NOT_DECIMAL_NUMBER_PATTERN.matcher(numberText).matches()) { + return Long.valueOf(numberText); + } - @Override - public void close() throws IOException { - flush(); - writer.close(); + return Double.valueOf(numberText); } }
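Usage note (not part of this patch): a minimal, untested sketch of how the createNestedWriter() factory introduced above might be driven. The module namespace "urn:example:mod", the "parent"/"name" node names, the EffectiveModelContext argument and the use of JSONCodecFactorySupplier.RFC7951 to obtain the codec factory are illustrative assumptions, not something this change mandates.

    // Illustrative sketch only: emit one leaf into a caller-owned JsonWriter via the nested writer.
    import com.google.gson.stream.JsonWriter;
    import java.io.IOException;
    import java.io.StringWriter;
    import org.opendaylight.yangtools.yang.common.QName;
    import org.opendaylight.yangtools.yang.common.XMLNamespace;
    import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
    import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
    import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactorySupplier;
    import org.opendaylight.yangtools.yang.data.codec.gson.JSONNormalizedNodeStreamWriter;
    import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
    import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier.Absolute;

    final class NestedWriterExample {
        private NestedWriterExample() {
            // utility class
        }

        static String writeSingleLeaf(final EffectiveModelContext modelContext) throws IOException {
            // Hypothetical module "urn:example:mod" with container "parent" holding leaf "name"
            final QName parent = QName.create("urn:example:mod", "parent");
            final StringWriter out = new StringWriter();
            final JsonWriter jsonWriter = new JsonWriter(out);

            // The caller owns the JsonWriter: a nested writer neither opens nor closes
            // the enclosing JSON object, so we do that here.
            jsonWriter.beginObject();
            final NormalizedNodeStreamWriter stream = JSONNormalizedNodeStreamWriter.createNestedWriter(
                JSONCodecFactorySupplier.RFC7951.getShared(modelContext),
                Absolute.of(parent), XMLNamespace.of("urn:example:mod"), jsonWriter);

            stream.startLeafNode(new NodeIdentifier(QName.create(parent, "name")));
            stream.scalarValue("example");
            stream.endNode();
            stream.close();          // Nested.close() flushes but leaves jsonWriter usable
            jsonWriter.endObject();
            jsonWriter.close();
            return out.toString();
        }
    }

An exclusive writer would be obtained the same way through createExclusiveWriter(); per the javadoc and Exclusive.close() above, closing it also ends the top-level JSON document and closes the wrapped JsonWriter.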