/*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.yangtools.yang.data.codec.gson;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
import static org.w3c.dom.Node.ELEMENT_NODE;
import static org.w3c.dom.Node.TEXT_NODE;
import com.google.common.collect.ClassToInstanceMap;
import com.google.common.collect.ImmutableClassToInstanceMap;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.net.URI;
import java.util.regex.Pattern;
import javax.xml.transform.dom.DOMSource;
import org.checkerframework.checker.regex.qual.Regex;
import org.opendaylight.yangtools.rfc8528.data.api.MountPointContext;
import org.opendaylight.yangtools.rfc8528.data.api.MountPointIdentifier;
import org.opendaylight.yangtools.rfc8528.data.api.StreamWriterMountPointExtension;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedAnydata;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriterExtension;
import org.opendaylight.yangtools.yang.data.impl.codec.SchemaTracker;
import org.opendaylight.yangtools.yang.data.util.SingleChildDataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.AnydataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.AnyxmlSchemaNode;
import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;
import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
/**
 * This implementation will create JSON output as output stream.
 *
 * <p>
 * Values of leaf and leaf-list are NOT translated according to codecs.
 */
public abstract class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter,
StreamWriterMountPointExtension {
/**
 * A writer which owns the underlying {@link JsonWriter}: closing this instance flushes
 * and closes the wrapped writer as well.
 */
private static final class Exclusive extends JSONNormalizedNodeStreamWriter {
Exclusive(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer,
final JSONStreamWriterRootContext rootContext) {
super(codecFactory, tracker, writer, rootContext);
}
@Override
public void close() throws IOException {
// Flush pending output first, then tear down the JsonWriter we own.
flush();
closeWriter();
}
}
/**
 * A writer nested inside a caller-owned {@link JsonWriter}: closing this instance only
 * flushes; the caller remains responsible for closing the underlying writer.
 */
private static final class Nested extends JSONNormalizedNodeStreamWriter {
Nested(final JSONCodecFactory codecFactory, final SchemaTracker tracker, final JsonWriter writer,
final JSONStreamWriterRootContext rootContext) {
super(codecFactory, tracker, writer, rootContext);
}
@Override
public void close() throws IOException {
flush();
// The caller "owns" the writer, let them close it
}
}
/**
 * RFC6020 deviation: we are not required to emit empty containers unless they
 * are marked as 'presence'.
 */
private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true;
// Matches an optionally-negative number with an optional fractional part.
@Regex
private static final String NUMBER_STRING = "-?\\d+(\\.\\d+)?";
private static final Pattern NUMBER_PATTERN = Pattern.compile(NUMBER_STRING);
// Matches an optionally-negative integer, i.e. a number without a fractional
// part. NOTE(review): the name reads as a negation ("not decimal") — presumably
// "not a decimal fraction"; confirm against the (unseen) usage sites.
@Regex
private static final String NOT_DECIMAL_NUMBER_STRING = "-?\\d+";
private static final Pattern NOT_DECIMAL_NUMBER_PATTERN = Pattern.compile(NOT_DECIMAL_NUMBER_STRING);
// Tracks our position in the schema tree as nodes are started/ended.
private final SchemaTracker tracker;
// Source of per-type JSON codecs and of the SchemaContext used for namespacing.
private final JSONCodecFactory codecs;
private final JsonWriter writer;
// Current emission context; replaced as nested nodes are started and ended.
private JSONStreamWriterContext context;
JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaTracker tracker,
final JsonWriter writer, final JSONStreamWriterRootContext rootContext) {
this.writer = requireNonNull(writer);
this.codecs = requireNonNull(codecFactory);
this.tracker = requireNonNull(tracker);
this.context = requireNonNull(rootContext);
}
/**
 * Create a new stream writer, which writes to the specified output stream. The codec factory
 * can be reused between multiple writers.
 *
 * <p>
 * The returned writer is the exclusive user of the supplied {@link JsonWriter}: it starts the
 * top-level JSON element and ends it. It may be used to emit only one top-level element,
 * otherwise it will produce incorrect JSON. Closing the returned instance closes
 * {@code jsonWriter} too.
 *
 * @param codecFactory JSON codec factory
 * @param path Schema Path
 * @param initialNs Initial namespace
 * @param jsonWriter JsonWriter
 * @return A stream writer instance
 */
public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory,
        final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
    final SchemaTracker tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path);
    final JSONStreamWriterRootContext rootContext = new JSONStreamWriterExclusiveRootContext(initialNs);
    return new Exclusive(codecFactory, tracker, jsonWriter, rootContext);
}
/**
 * Create a new stream writer, which writes to the specified output stream. The codec factory
 * can be reused between multiple writers.
 *
 * <p>
 * The returned writer is the exclusive user of the supplied {@link JsonWriter}: it starts the
 * top-level JSON element and ends it. It may be used to emit only one top-level element,
 * otherwise it will produce incorrect JSON. Closing the returned instance closes
 * {@code jsonWriter} too.
 *
 * @param codecFactory JSON codec factory
 * @param rootNode Root node
 * @param initialNs Initial namespace
 * @param jsonWriter JsonWriter
 * @return A stream writer instance
 */
public static NormalizedNodeStreamWriter createExclusiveWriter(final JSONCodecFactory codecFactory,
        final DataNodeContainer rootNode, final URI initialNs, final JsonWriter jsonWriter) {
    final SchemaTracker tracker = SchemaTracker.create(rootNode);
    final JSONStreamWriterRootContext rootContext = new JSONStreamWriterExclusiveRootContext(initialNs);
    return new Exclusive(codecFactory, tracker, jsonWriter, rootContext);
}
/**
 * Create a new stream writer, which writes to the specified output stream. The codec factory
 * can be reused between multiple writers.
 *
 * <p>
 * The returned writer may emit multiple top-level elements, but it does not start or close
 * the parent JSON object — the user providing {@code jsonWriter} must do that for the JSON
 * to be valid. Closing the returned instance does not close the wrapped writer; the caller
 * must take care of that.
 *
 * @param codecFactory JSON codec factory
 * @param path Schema Path
 * @param initialNs Initial namespace
 * @param jsonWriter JsonWriter
 * @return A stream writer instance
 */
public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory,
        final SchemaPath path, final URI initialNs, final JsonWriter jsonWriter) {
    final SchemaTracker tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path);
    final JSONStreamWriterRootContext rootContext = new JSONStreamWriterSharedRootContext(initialNs);
    return new Nested(codecFactory, tracker, jsonWriter, rootContext);
}
/**
 * Create a new stream writer, which writes to the specified output stream. The codec factory
 * can be reused between multiple writers.
 *
 * <p>
 * The returned writer may emit multiple top-level elements, but it does not start or close
 * the parent JSON object — the user providing {@code jsonWriter} must do that for the JSON
 * to be valid. Closing the returned instance does not close the wrapped writer; the caller
 * must take care of that.
 *
 * @param codecFactory JSON codec factory
 * @param rootNode Root node
 * @param initialNs Initial namespace
 * @param jsonWriter JsonWriter
 * @return A stream writer instance
 */
public static NormalizedNodeStreamWriter createNestedWriter(final JSONCodecFactory codecFactory,
        final DataNodeContainer rootNode, final URI initialNs, final JsonWriter jsonWriter) {
    final SchemaTracker tracker = SchemaTracker.create(rootNode);
    final JSONStreamWriterRootContext rootContext = new JSONStreamWriterSharedRootContext(initialNs);
    return new Nested(codecFactory, tracker, jsonWriter, rootContext);
}
@Override
// FIX: the return type had degraded to a raw ClassToInstanceMap; restore the wildcard
// parameterization (the import of NormalizedNodeStreamWriterExtension is otherwise unused,
// indicating this was the original bound). Erasure-compatible with existing callers.
public ClassToInstanceMap<? extends NormalizedNodeStreamWriterExtension> getExtensions() {
    // This writer implements the mount-point extension itself (see startMountPoint).
    return ImmutableClassToInstanceMap.of(StreamWriterMountPointExtension.class, this);
}
@Override
// Emits the leaf's member name; the value itself arrives via scalarValue().
// NOTE(review): non-final unlike most sibling start* methods — presumably intentional
// to permit subclass interception; confirm before sealing.
public void startLeafNode(final NodeIdentifier name) throws IOException {
tracker.startLeafNode(name);
context.emittingChild(codecs.getSchemaContext(), writer);
context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
}
@Override
// Opens a JSON array context for the leaf-list; childSizeHint is not needed for JSON output.
public final void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startLeafSet(name);
context = new JSONStreamWriterListContext(context, name);
}
@Override
public void startLeafSetEntryNode(final NodeWithValue> name) throws IOException {
tracker.startLeafSetEntryNode(name);
context.emittingChild(codecs.getSchemaContext(), writer);
}
@Override
// Ordered leaf-lists are emitted identically to unordered ones in JSON; element order
// is whatever the stream delivers.
public final void startOrderedLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startLeafSet(name);
context = new JSONStreamWriterListContext(context, name);
}
/*
 * Warning suppressed due to static final constant which triggers a warning
 * for the call to schema.isPresenceContainer().
 */
@Override
public final void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
    final SchemaNode schema = tracker.startContainerNode(name);
    // Presence containers must always be emitted, even when empty; for anything else we
    // fall back to the RFC6020-deviating default of emitting empty containers.
    final boolean mandatory;
    if (schema instanceof ContainerSchemaNode) {
        mandatory = ((ContainerSchemaNode) schema).isPresenceContainer();
    } else {
        mandatory = DEFAULT_EMIT_EMPTY_CONTAINERS;
    }
    context = new JSONStreamWriterNamedObjectContext(context, name, mandatory);
}
@Override
// An unkeyed list maps to a JSON array of objects.
public final void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startList(name);
context = new JSONStreamWriterListContext(context, name);
}
@Override
// Each list item is an anonymous JSON object inside the enclosing array; items are
// always emitted, even when empty.
public final void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startListItem(name);
context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
}
@Override
// A keyed list (map) also maps to a JSON array of objects; keys appear as ordinary
// members of each entry object.
public final void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startList(name);
context = new JSONStreamWriterListContext(context, name);
}
@Override
// A map entry is an anonymous object in the list's array; the key predicates are not
// emitted here — key leaves arrive as ordinary child leaf nodes.
public final void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint)
throws IOException {
tracker.startListItem(identifier);
context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS);
}
@Override
// Ordered maps are emitted identically to unordered ones in JSON.
public final void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startList(name);
context = new JSONStreamWriterListContext(context, name);
}
@Override
// Choice nodes have no JSON representation of their own: an invisible context passes
// children straight through to the enclosing object.
public final void startChoiceNode(final NodeIdentifier name, final int childSizeHint) {
tracker.startChoiceNode(name);
context = new JSONStreamWriterInvisibleContext(context);
}
@Override
// Augmentations, like choices, are transparent in JSON encoding.
public final void startAugmentationNode(final AugmentationIdentifier identifier) {
tracker.startAugmentationNode(identifier);
context = new JSONStreamWriterInvisibleContext(context);
}
@Override
public final boolean startAnydataNode(final NodeIdentifier name, final Class> objectModel) throws IOException {
if (NormalizedAnydata.class.isAssignableFrom(objectModel)) {
tracker.startAnydataNode(name);
context.emittingChild(codecs.getSchemaContext(), writer);
context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
return true;
}
return false;
}
@Override
// Switches to the mount point's schema by rebasing the codec factory onto the mounted
// SchemaContext and returning a fresh Nested writer sharing our JsonWriter. The current
// namespace is carried over so member names are qualified correctly.
// NOTE(review): mountId is unused here — presumably the identifier was already emitted
// by the caller; confirm against StreamWriterMountPointExtension's contract.
public final NormalizedNodeStreamWriter startMountPoint(final MountPointIdentifier mountId,
final MountPointContext mountCtx) throws IOException {
final SchemaContext ctx = mountCtx.getSchemaContext();
return new Nested(codecs.rebaseTo(ctx), SchemaTracker.create(ctx), writer,
new JSONStreamWriterSharedRootContext(context.getNamespace()));
}
@Override
public final boolean startAnyxmlNode(final NodeIdentifier name, final Class> objectModel) throws IOException {
if (DOMSource.class.isAssignableFrom(objectModel)) {
tracker.startAnyxmlNode(name);
context.emittingChild(codecs.getSchemaContext(), writer);
context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
return true;
}
return false;
}
@Override
// YANG-modeled anyxml is emitted as a named JSON object which is always written,
// even when empty (the 'true' mandatory flag).
public final void startYangModeledAnyXmlNode(final NodeIdentifier name, final int childSizeHint)
throws IOException {
tracker.startYangModeledAnyXmlNode(name);
context = new JSONStreamWriterNamedObjectContext(context, name, true);
}
@Override
// Pops the current context, emitting any pending closing brackets, and makes its
// parent the active context.
public final void endNode() throws IOException {
tracker.endNode();
context = context.endNode(codecs.getSchemaContext(), writer);
}
@Override
// Delegates to the underlying JsonWriter; does not end any open JSON structures.
public final void flush() throws IOException {
writer.flush();
}
// Ends the root context and closes the underlying JsonWriter. Only invoked by
// Exclusive.close(); refuses to run unless all nested nodes have been ended, which
// would otherwise produce truncated JSON.
final void closeWriter() throws IOException {
if (!(context instanceof JSONStreamWriterRootContext)) {
throw new IOException("Unexpected root context " + context);
}
context.endNode(codecs.getSchemaContext(), writer);
writer.close();
}
@Override
// Emits a scalar value for the node most recently started: leaves/leaf-list entries go
// through their type's codec, anydata through the dedicated anydata path.
public void scalarValue(final Object value) throws IOException {
    final Object parent = tracker.getParent();
    if (parent instanceof TypedDataSchemaNode) {
        writeValue(value, codecs.codecFor((TypedDataSchemaNode) parent));
        return;
    }
    if (parent instanceof AnydataSchemaNode) {
        writeAnydataValue(value);
        return;
    }
    throw new IllegalStateException(String.format("Cannot emit scalar %s for %s", value, parent));
}
@Override
// Emits an anyxml value; only valid when the current node is an anyxml schema node.
public void domSourceValue(final DOMSource value) throws IOException {
final Object current = tracker.getParent();
checkState(current instanceof AnyxmlSchemaNode, "Cannot emit DOMSource %s for %s", value, current);
// FIXME: should have a codec based on this :)
writeAnyXmlValue(value);
}
@SuppressWarnings("unchecked")
private void writeValue(final Object value, final JSONCodec> codec) throws IOException {
((JSONCodec