X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJSONNormalizedNodeStreamWriter.java;h=4a2206d7f9d32ea851630acef702eb2f08599e7a;hb=5337e2540746f9d3f101f847a32781fa7a8241d3;hp=079b8e324aeba0638e2832596059b50c3f279f68;hpb=dcbf1f7b9b958a345204494514d568b59aca6096;p=yangtools.git diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java index 079b8e324a..4a2206d7f9 100644 --- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java +++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONNormalizedNodeStreamWriter.java @@ -7,258 +7,575 @@ */ package org.opendaylight.yangtools.yang.data.codec.gson; -import com.google.common.base.Preconditions; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Verify.verify; +import static java.util.Objects.requireNonNull; +import static org.w3c.dom.Node.ELEMENT_NODE; +import static org.w3c.dom.Node.TEXT_NODE; + +import com.google.common.collect.ClassToInstanceMap; +import com.google.common.collect.ImmutableClassToInstanceMap; import com.google.gson.stream.JsonWriter; import java.io.IOException; -import java.io.Writer; -import java.net.URI; +import java.util.List; +import java.util.regex.Pattern; +import javax.xml.transform.dom.DOMSource; +import org.checkerframework.checker.regex.qual.Regex; +import org.opendaylight.yangtools.rfc8528.data.api.MountPointContext; +import org.opendaylight.yangtools.rfc8528.data.api.MountPointIdentifier; +import org.opendaylight.yangtools.rfc8528.data.api.StreamWriterMountPointExtension; +import org.opendaylight.yangtools.yang.common.XMLNamespace; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates; +import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue; +import org.opendaylight.yangtools.yang.data.api.schema.NormalizedAnydata; import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter; +import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriterExtension; import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker; -import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode; -import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode; -import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode; -import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import org.opendaylight.yangtools.yang.model.api.AnydataSchemaNode; +import org.opendaylight.yangtools.yang.model.api.AnyxmlSchemaNode; +import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode; +import org.opendaylight.yangtools.yang.model.api.DataNodeContainer; +import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext; +import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference; import org.opendaylight.yangtools.yang.model.api.SchemaNode; import 
org.opendaylight.yangtools.yang.model.api.SchemaPath; +import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode; +import org.opendaylight.yangtools.yang.model.api.meta.EffectiveStatement; +import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier.Absolute; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.w3c.dom.Text; /** * This implementation will create JSON output as output stream. * + *
* Values of leaf and leaf-list are NOT translated according to codecs. - * */ -public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter { +public abstract class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter, + StreamWriterMountPointExtension { + private static final class Exclusive extends JSONNormalizedNodeStreamWriter { + Exclusive(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer, + final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); + } + + @Override + public void close() throws IOException { + flush(); + closeWriter(); + } + } + + private static final class Nested extends JSONNormalizedNodeStreamWriter { + Nested(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer, + final JSONStreamWriterRootContext rootContext) { + super(codecFactory, tracker, writer, rootContext); + } + + @Override + public void close() throws IOException { + flush(); + // The caller "owns" the writer, let them close it + } + } + /** * RFC6020 deviation: we are not required to emit empty containers unless they * are marked as 'presence'. */ private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true; + @Regex + private static final String NUMBER_STRING = "-?\\d+(\\.\\d+)?"; + private static final Pattern NUMBER_PATTERN = Pattern.compile(NUMBER_STRING); + + @Regex + private static final String NOT_DECIMAL_NUMBER_STRING = "-?\\d+"; + private static final Pattern NOT_DECIMAL_NUMBER_PATTERN = Pattern.compile(NOT_DECIMAL_NUMBER_STRING); + private final SchemaTracker tracker; private final JSONCodecFactory codecs; private final JsonWriter writer; private JSONStreamWriterContext context; - private JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, JsonWriter JsonWriter) { - this.writer = JsonWriter; - this.codecs = Preconditions.checkNotNull(codecFactory); - this.tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path); - this.context = new JSONStreamWriterRootContext(initialNs); + JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaTracker tracker, + final JsonWriter writer, final JSONStreamWriterRootContext rootContext) { + this.writer = requireNonNull(writer); + this.codecs = requireNonNull(codecFactory); + this.tracker = requireNonNull(tracker); + this.context = requireNonNull(rootContext); } /** - * Create a new stream writer, which writes to the specified {@link Writer}. + * Create a new stream writer, which writes to the specified output stream. * - * @param schemaContext Schema context - * @param writer Output writer - * @return A stream writer instance - */ - public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer) { - return create(schemaContext, SchemaPath.ROOT, null, writer); - } - - /** - * Create a new stream writer, which writes to the specified {@link Writer}. + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer is exclusive user of JsonWriter, which means it will start + * top-level JSON element and ends it. * - * @param schemaContext Schema context - * @param path Root schemapath - * @param writer Output writer + *
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param path Schema Path + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path, final Writer writer) { - return create(schemaContext, path, null, writer); + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final SchemaPath path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, SchemaTracker.create(codecFactory.getEffectiveModelContext(), path), + jsonWriter, new JSONStreamWriterExclusiveRootContext(initialNs)); } /** - * Create a new stream writer, which writes to the specified {@link Writer}. + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer is exclusive user of JsonWriter, which means it will start + * top-level JSON element and ends it. * - * @param schemaContext Schema context - * @param path Root schemapath - * @param writer Output writer + *
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param path Schema Path * @param initialNs Initial namespace + * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path, - final URI initialNs, final Writer writer) { - return create(JSONCodecFactory.create(schemaContext), path, initialNs, JsonWriterFactory.createJsonWriter(writer)); + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final Absolute path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, SchemaTracker.create(codecFactory.getEffectiveModelContext(), path), + jsonWriter, new JSONStreamWriterExclusiveRootContext(initialNs)); } /** * Create a new stream writer, which writes to the specified output stream. * - * @param schemaContext Schema context - * @param writer Output writer - * @param indentSize indentation size + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer is exclusive user of JsonWriter, which means it will start + * top-level JSON element and ends it. + * + *
+ * This instance of writer can be used only to emit one top level element, + * otherwise it will produce incorrect JSON. Closing this instance will close + * the writer too. + * + * @param codecFactory JSON codec factory + * @param rootNode Root node + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer, final int indentSize) { - return create(JSONCodecFactory.create(schemaContext), SchemaPath.ROOT, null,JsonWriterFactory.createJsonWriter(writer, indentSize)); + public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory, + final DataNodeContainer rootNode, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Exclusive(codecFactory, SchemaTracker.create(rootNode), jsonWriter, + new JSONStreamWriterExclusiveRootContext(initialNs)); } /** - * Create a new stream writer, which writes to the specified output stream. The codec factory - * can be reused between multiple writers. + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. * * @param codecFactory JSON codec factory - * @param writer Output writer - * @param indentSize indentation size + * @param path Schema Path + * @param initialNs Initial namespace + * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(final JSONCodecFactory codecFactory, final Writer writer, final int indentSize) { - return create(codecFactory, SchemaPath.ROOT, null, JsonWriterFactory.createJsonWriter(writer,indentSize)); + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final SchemaPath path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, SchemaTracker.create(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); } /** * Create a new stream writer, which writes to the specified output stream. * - * @param schemaContext Schema context + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. + * + * @param codecFactory JSON codec factory * @param path Schema Path * @param initialNs Initial namespace * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(SchemaContext schemaContext, SchemaPath path, URI initialNs, - JsonWriter jsonWriter) { - return create(JSONCodecFactory.create(schemaContext), path, initialNs, jsonWriter); + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final Absolute path, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, SchemaTracker.create(codecFactory.getEffectiveModelContext(), path), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); } /** - * Create a new stream writer, which writes to the specified output stream. The codec factory - * can be reused between multiple writers. + * Create a new stream writer, which writes to the specified output stream. + * + *
+ * The codec factory can be reused between multiple writers. + * + *
+ * Returned writer can be used emit multiple top level element, + * but does not start / close parent JSON object, which must be done + * by user providing {@code jsonWriter} instance in order for + * JSON to be valid. Closing this instance will not + * close the wrapped writer; the caller must take care of that. * * @param codecFactory JSON codec factory - * @param path Schema Path + * @param rootNode Root node * @param initialNs Initial namespace * @param jsonWriter JsonWriter * @return A stream writer instance */ - public static NormalizedNodeStreamWriter create(JSONCodecFactory codecFactory, SchemaPath path, URI initialNs, JsonWriter jsonWriter) { - return new JSONNormalizedNodeStreamWriter(codecFactory, path, initialNs, jsonWriter); + public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory, + final DataNodeContainer rootNode, final XMLNamespace initialNs, final JsonWriter jsonWriter) { + return new Nested(codecFactory, SchemaTracker.create(rootNode), jsonWriter, + new JSONStreamWriterSharedRootContext(initialNs)); } @Override - public void leafNode(final NodeIdentifier name, final Object value) throws IOException { - final LeafSchemaNode schema = tracker.leafNode(name); - final JSONCodec codec = codecs.codecFor(schema.getType()); - - context.emittingChild(codecs.getSchemaContext(), writer); - context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); + public ClassToInstanceMap getExtensions() { + return ImmutableClassToInstanceMap.of(StreamWriterMountPointExtension.class, this); + } - writeValue(value, codec); + @Override + public void startLeafNode(final NodeIdentifier name) throws IOException { + tracker.startLeafNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); } @Override - public void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startLeafSet(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void leafSetEntryNode(final Object value) throws IOException { - final LeafListSchemaNode schema = tracker.leafSetEntryNode(); - final JSONCodec codec = codecs.codecFor(schema.getType()); - - context.emittingChild(codecs.getSchemaContext(), writer); + public void startLeafSetEntryNode(final NodeWithValue name) throws IOException { + tracker.startLeafSetEntryNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + } - writeValue(value, codec); + @Override + public final void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException { + tracker.startLeafSet(name); + context = new JSONStreamWriterListContext(context, name); } /* * Warning suppressed due to static final constant which triggers a warning * for the call to schema.isPresenceContainer(). 
*/ - @SuppressWarnings("unused") @Override - public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException { final SchemaNode schema = tracker.startContainerNode(name); - - // FIXME this code ignores presence for containers - // but datastore does as well and it needs be fixed first (2399) - context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); + final boolean isPresence = schema instanceof ContainerSchemaNode + ? ((ContainerSchemaNode) schema).isPresenceContainer() : DEFAULT_EMIT_EMPTY_CONTAINERS; + context = new JSONStreamWriterNamedObjectContext(context, name, isPresence); } @Override - public void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startListItem(name); context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) + public final void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) throws IOException { tracker.startListItem(identifier); context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS); } @Override - public void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { + public final void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException { tracker.startList(name); context = new JSONStreamWriterListContext(context, name); } @Override - public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) { + public final void startChoiceNode(final NodeIdentifier name, final int childSizeHint) { tracker.startChoiceNode(name); context = new JSONStreamWriterInvisibleContext(context); } @Override - public void startAugmentationNode(final AugmentationIdentifier identifier) { + public final void startAugmentationNode(final AugmentationIdentifier identifier) { tracker.startAugmentationNode(identifier); context = new JSONStreamWriterInvisibleContext(context); } @Override - public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException { - @SuppressWarnings("unused") - final AnyXmlSchemaNode schema = tracker.anyxmlNode(name); - // FIXME: should have a codec based on this :) + public final boolean startAnydataNode(final NodeIdentifier name, final Class objectModel) throws IOException { + if (NormalizedAnydata.class.isAssignableFrom(objectModel)) { + tracker.startAnydataNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + 
context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); + return true; + } - context.emittingChild(codecs.getSchemaContext(), writer); - context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType()); - writer.value(String.valueOf(value)); + return false; } @Override - public void endNode() throws IOException { - tracker.endNode(); - context = context.endNode(codecs.getSchemaContext(), writer); - if(context instanceof JSONStreamWriterRootContext) { - context.emitEnd(writer); - } + public final NormalizedNodeStreamWriter startMountPoint(final MountPointIdentifier mountId, + final MountPointContext mountCtx) throws IOException { + final EffectiveModelContext ctx = mountCtx.getEffectiveModelContext(); + return new Nested(codecs.rebaseTo(ctx), SchemaTracker.create(ctx), writer, + new JSONStreamWriterSharedRootContext(context.getNamespace())); } - private void writeValue(Object value, JSONCodec codec) - throws IOException { - codec.serializeToWriter(writer,value); + @Override + public final boolean startAnyxmlNode(final NodeIdentifier name, final Class objectModel) throws IOException { + if (DOMSource.class.isAssignableFrom(objectModel)) { + tracker.startAnyxmlNode(name); + context.emittingChild(codecs.getEffectiveModelContext(), writer); + context.writeChildJsonIdentifier(codecs.getEffectiveModelContext(), writer, name.getNodeType()); + return true; + } + return false; } @Override - public void flush() throws IOException { - writer.flush(); + public final void endNode() throws IOException { + tracker.endNode(); + context = context.endNode(codecs.getEffectiveModelContext(), writer); } @Override - public void close() throws IOException { + public final void flush() throws IOException { writer.flush(); + } + + final void closeWriter() throws IOException { + if (!(context instanceof JSONStreamWriterRootContext)) { + throw new IOException("Unexpected root context " + context); + } + + context.endNode(codecs.getEffectiveModelContext(), writer); writer.close(); } + @Override + public void scalarValue(final Object value) throws IOException { + final Object current = tracker.getParent(); + if (current instanceof TypedDataSchemaNode) { + writeValue(value, codecs.codecFor((TypedDataSchemaNode) current)); + } else if (current instanceof AnydataSchemaNode) { + writeAnydataValue(value); + } else { + throw new IllegalStateException(String.format("Cannot emit scalar %s for %s", value, current)); + } + } + + @Override + public void domSourceValue(final DOMSource value) throws IOException { + final Object current = tracker.getParent(); + checkState(current instanceof AnyxmlSchemaNode, "Cannot emit DOMSource %s for %s", value, current); + // FIXME: should have a codec based on this :) + writeAnyXmlValue(value); + } + + @SuppressWarnings("unchecked") + private void writeValue(final Object value, final JSONCodec codec) throws IOException { + ((JSONCodec) codec).writeValue(writer, value); + } + + private void writeAnydataValue(final Object value) throws IOException { + if (value instanceof NormalizedAnydata) { + writeNormalizedAnydata((NormalizedAnydata) value); + } else { + throw new IllegalStateException("Unexpected anydata value " + value); + } + } + + private void writeNormalizedAnydata(final NormalizedAnydata anydata) throws IOException { + final EffectiveStatementInference inference = anydata.getInference(); + final List> path = inference.statementPath(); + final DataNodeContainer parent; + if (path.size() > 1) { + final EffectiveStatement stmt 
= path.get(path.size() - 2); + verify(stmt instanceof DataNodeContainer, "Unexpected statement %s", stmt); + parent = (DataNodeContainer) stmt; + } else { + parent = inference.getEffectiveModelContext(); + } + + anydata.writeTo(JSONNormalizedNodeStreamWriter.createNestedWriter( + codecs.rebaseTo(inference.getEffectiveModelContext()), parent, context.getNamespace(), writer)); + } + + private void writeAnyXmlValue(final DOMSource anyXmlValue) throws IOException { + writeXmlNode(anyXmlValue.getNode()); + } + + private void writeXmlNode(final Node node) throws IOException { + if (isArrayElement(node)) { + writeArrayContent(node); + return; + } + final Element firstChildElement = getFirstChildElement(node); + if (firstChildElement == null) { + writeXmlValue(node); + } else { + writeObjectContent(firstChildElement); + } + } + + private void writeArrayContent(final Node node) throws IOException { + writer.beginArray(); + handleArray(node); + writer.endArray(); + } + + private void writeObjectContent(final Element firstChildElement) throws IOException { + writer.beginObject(); + writeObject(firstChildElement); + writer.endObject(); + } + + private static boolean isArrayElement(final Node node) { + if (ELEMENT_NODE == node.getNodeType()) { + final String nodeName = node.getNodeName(); + for (Node nextNode = node.getNextSibling(); nextNode != null; nextNode = nextNode.getNextSibling()) { + if (ELEMENT_NODE == nextNode.getNodeType() && nodeName.equals(nextNode.getNodeName())) { + return true; + } + } + } + return false; + } + + private void handleArray(final Node node) throws IOException { + final Element parentNode = (Element)node.getParentNode(); + final NodeList elementsList = parentNode.getElementsByTagName(node.getNodeName()); + for (int i = 0, length = elementsList.getLength(); i < length; i++) { + final Node arrayElement = elementsList.item(i); + final Element parent = (Element)arrayElement.getParentNode(); + if (parentNode.isSameNode(parent)) { + final Element firstChildElement = getFirstChildElement(arrayElement); + if (firstChildElement != null) { + writeObjectContent(firstChildElement); + } else { + // It may be scalar + writeXmlValue(arrayElement); + } + } + } + } + + private void writeObject(Node node) throws IOException { + String previousNodeName = ""; + while (node != null) { + if (ELEMENT_NODE == node.getNodeType()) { + if (!node.getNodeName().equals(previousNodeName)) { + previousNodeName = node.getNodeName(); + writer.name(node.getNodeName()); + writeXmlNode(node); + } + } + node = node.getNextSibling(); + } + } + private void writeXmlValue(final Node node) throws IOException { + Text firstChild = getFirstChildText(node); + String childNodeText = firstChild != null ? firstChild.getWholeText() : ""; + childNodeText = childNodeText != null ? 
childNodeText.trim() : ""; + + if (NUMBER_PATTERN.matcher(childNodeText).matches()) { + writer.value(parseNumber(childNodeText)); + return; + } + switch (childNodeText) { + case "null": + writer.nullValue(); + break; + case "false": + writer.value(false); + break; + case "true": + writer.value(true); + break; + default: + writer.value(childNodeText); + } + } + private static Element getFirstChildElement(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (ELEMENT_NODE == childNode.getNodeType()) { + return (Element) childNode; + } + } + return null; + } + + private static Text getFirstChildText(final Node node) { + final NodeList children = node.getChildNodes(); + for (int i = 0, length = children.getLength(); i < length; i++) { + final Node childNode = children.item(i); + if (TEXT_NODE == childNode.getNodeType()) { + return (Text) childNode; + } + } + return null; + } + + // json numbers are 64 bit wide floating point numbers - in java terms it is either long or double + private static Number parseNumber(final String numberText) { + if (NOT_DECIMAL_NUMBER_PATTERN.matcher(numberText).matches()) { + return Long.valueOf(numberText); + } + + return Double.valueOf(numberText); + } }
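Usage note (not part of the patch): the old create(...) factory methods are replaced by createExclusiveWriter(...) and createNestedWriter(...), which differ in who owns the supplied JsonWriter and the enclosing JSON object. The sketch below illustrates that difference using only the API visible in this diff. It is an illustration, not authoritative code: the JSONCodecFactory instance, the placeholder namespace "urn:example:demo" and the top-level leaf identified by leafName are assumptions supplied by the caller, and how a codec factory is obtained for a given EffectiveModelContext is outside the scope of this change.

// Sketch only: assumes "codecs" is a JSONCodecFactory bound to the current model and that
// "leafName" identifies an existing top-level leaf in that model.
import java.io.IOException;
import java.io.StringWriter;

import com.google.gson.stream.JsonWriter;
import org.opendaylight.yangtools.yang.common.XMLNamespace;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactory;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONNormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;

public final class ExclusiveVsNestedDemo {
    // Placeholder initial namespace, only used to decide when module prefixes are emitted.
    private static final XMLNamespace DEMO_NS = XMLNamespace.of("urn:example:demo");

    private ExclusiveVsNestedDemo() {
        // utility class
    }

    // Exclusive writer: it owns the JsonWriter, emits the enclosing JSON object itself
    // and closes the JsonWriter when close() is called.
    public static String writeSingleLeaf(final JSONCodecFactory codecs, final NodeIdentifier leafName,
            final Object leafValue) throws IOException {
        final StringWriter out = new StringWriter();
        try (NormalizedNodeStreamWriter writer = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
                codecs, SchemaPath.ROOT, DEMO_NS, new JsonWriter(out))) {
            writer.startLeafNode(leafName);  // emits the member name
            writer.scalarValue(leafValue);   // value is encoded through the leaf's JSONCodec
            writer.endNode();
        }                                    // close() ends the root object and closes the JsonWriter
        return out.toString();
    }

    // Nested writer: the caller owns the JsonWriter and the enclosing JSON object, so it can
    // emit several top-level elements; close() flushes but leaves the JsonWriter open.
    public static void writeIntoExistingObject(final JSONCodecFactory codecs, final JsonWriter jsonWriter,
            final NodeIdentifier leafName, final Object leafValue) throws IOException {
        jsonWriter.beginObject();            // caller-provided enclosing object
        try (NormalizedNodeStreamWriter writer = JSONNormalizedNodeStreamWriter.createNestedWriter(
                codecs, SchemaPath.ROOT, DEMO_NS, jsonWriter)) {
            writer.startLeafNode(leafName);
            writer.scalarValue(leafValue);
            writer.endNode();
        }                                    // close() does NOT close jsonWriter (Nested semantics)
        jsonWriter.endObject();              // caller closes what it opened
    }
}

The same pattern should apply to the Absolute and DataNodeContainer overloads added in this patch; they differ only in how the underlying SchemaTracker is primed.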