package org.opendaylight.yangtools.yang.data.codec.gson;

import com.google.common.base.Preconditions;
import com.google.gson.stream.JsonWriter;

import java.io.IOException;
import java.io.Writer;
import java.net.URI;

import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;
/**
 * A NormalizedNode stream writer which emits JSON through a {@link JsonWriter}.
 *
 * Values of leaf and leaf-list are NOT translated according to codecs.
 */
public class JSONNormalizedNodeStreamWriter implements NormalizedNodeStreamWriter {
+ /**
+ * RFC6020 deviation: we are not required to emit empty containers unless they
+ * are marked as 'presence'.
+ */
+ private static final boolean DEFAULT_EMIT_EMPTY_CONTAINERS = true;
- private static enum NodeType {
- OBJECT,
- LIST,
- OTHER,
- }
-
- private static class TypeInfo {
- private boolean hasAtLeastOneChild = false;
- private final NodeType type;
- private final URI uri;
-
- public TypeInfo(final NodeType type, final URI uri) {
- this.type = type;
- this.uri = uri;
- }
-
- public void setHasAtLeastOneChild(final boolean hasChildren) {
- this.hasAtLeastOneChild = hasChildren;
- }
-
- public NodeType getType() {
- return type;
- }
-
- public URI getNamespace() {
- return uri;
- }
-
- public boolean hasAtLeastOneChild() {
- return hasAtLeastOneChild;
- }
- }
-
- private static final Collection<Class<?>> NUMERIC_CLASSES =
- ImmutableSet.<Class<?>>of(Byte.class, Short.class, Integer.class, Long.class, BigInteger.class, BigDecimal.class);
- private final Deque<TypeInfo> stack = new ArrayDeque<>();
- private final SchemaContext schemaContext;
- private final CodecFactory codecs;
private final SchemaTracker tracker;
- private final Writer writer;
- private final String indent;
-
- private int currentDepth = 0;
- private URI currentNamespace;
-
- private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext,
- final Writer writer, final int indentSize) {
- this(schemaContext, SchemaPath.ROOT, writer, null, indentSize);
- }
-
- private JSONNormalizedNodeStreamWriter(final SchemaContext schemaContext, final SchemaPath path,
- final Writer writer, final URI initialNs, final int indentSize) {
- this.schemaContext = Preconditions.checkNotNull(schemaContext);
- this.writer = Preconditions.checkNotNull(writer);
-
- Preconditions.checkArgument(indentSize >= 0, "Indent size must be non-negative");
- if (indentSize != 0) {
- indent = Strings.repeat(" ", indentSize);
- } else {
- indent = null;
- }
- this.codecs = CodecFactory.create(schemaContext);
- this.tracker = SchemaTracker.create(schemaContext, path);
+ private final JSONCodecFactory codecs;
+ private final JsonWriter writer;
+ private JSONStreamWriterContext context;
- this.currentNamespace = initialNs;
+ private JSONNormalizedNodeStreamWriter(final JSONCodecFactory codecFactory, final SchemaPath path, final URI initialNs, JsonWriter JsonWriter) {
+ this.writer = JsonWriter;
+ this.codecs = Preconditions.checkNotNull(codecFactory);
+ this.tracker = SchemaTracker.create(codecFactory.getSchemaContext(), path);
+ this.context = new JSONStreamWriterRootContext(initialNs);
}
/**
* @return A stream writer instance
*/
public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer) {
- return new JSONNormalizedNodeStreamWriter(schemaContext, writer, 0);
+ return create(schemaContext, SchemaPath.ROOT, null, writer);
}
/**
* Create a new stream writer, which writes to the specified {@link Writer}.
*
* @param schemaContext Schema context
+ * @param path Root schemapath
* @param writer Output writer
* @return A stream writer instance
*/
- public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,final Writer writer) {
- return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, null, 0);
+ public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path, final Writer writer) {
+ return create(schemaContext, path, null, writer);
}
/**
* Create a new stream writer, which writes to the specified {@link Writer}.
*
* @param schemaContext Schema context
+ * @param path Root schemapath
* @param writer Output writer
* @param initialNs Initial namespace
* @return A stream writer instance
*/
- public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,final URI initialNs, final Writer writer) {
- return new JSONNormalizedNodeStreamWriter(schemaContext, path, writer, initialNs, 0);
+ public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final SchemaPath path,
+ final URI initialNs, final Writer writer) {
+ return create(JSONCodecFactory.create(schemaContext), path, initialNs, JsonWriterFactory.createJsonWriter(writer));
}
/**
* @return A stream writer instance
*/
public static NormalizedNodeStreamWriter create(final SchemaContext schemaContext, final Writer writer, final int indentSize) {
- return new JSONNormalizedNodeStreamWriter(schemaContext, writer, indentSize);
+ return create(JSONCodecFactory.create(schemaContext), SchemaPath.ROOT, null,JsonWriterFactory.createJsonWriter(writer, indentSize));
+ }
+
+ /**
+ * Create a new stream writer, which writes to the specified output stream. The codec factory
+ * can be reused between multiple writers.
+ *
+ * @param codecFactory JSON codec factory
+ * @param writer Output writer
+ * @param indentSize indentation size
+ * @return A stream writer instance
+ */
+ public static NormalizedNodeStreamWriter create(final JSONCodecFactory codecFactory, final Writer writer, final int indentSize) {
+ return create(codecFactory, SchemaPath.ROOT, null, JsonWriterFactory.createJsonWriter(writer,indentSize));
+ }
+
+ /**
+ * Create a new stream writer, which writes to the specified output stream.
+ *
+ * @param schemaContext Schema context
+ * @param path Schema Path
+ * @param initialNs Initial namespace
+ * @param jsonWriter JsonWriter
+ * @return A stream writer instance
+ */
+ public static NormalizedNodeStreamWriter create(SchemaContext schemaContext, SchemaPath path, URI initialNs,
+ JsonWriter jsonWriter) {
+ return create(JSONCodecFactory.create(schemaContext), path, initialNs, jsonWriter);
+ }
+
+ /**
+ * Create a new stream writer, which writes to the specified output stream. The codec factory
+ * can be reused between multiple writers.
+ *
+ * @param codecFactory JSON codec factory
+ * @param path Schema Path
+ * @param initialNs Initial namespace
+ * @param jsonWriter JsonWriter
+ * @return A stream writer instance
+ */
+ public static NormalizedNodeStreamWriter create(JSONCodecFactory codecFactory, SchemaPath path, URI initialNs, JsonWriter jsonWriter) {
+ return new JSONNormalizedNodeStreamWriter(codecFactory, path, initialNs, jsonWriter);
}
@Override
public void leafNode(final NodeIdentifier name, final Object value) throws IOException {
final LeafSchemaNode schema = tracker.leafNode(name);
- final Codec<Object, Object> codec = codecs.codecFor(schema.getType());
+ final JSONCodec<Object> codec = codecs.codecFor(schema.getType());
+
+ context.emittingChild(codecs.getSchemaContext(), writer);
+ context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
- separateElementFromPreviousElement();
- writeJsonIdentifier(name);
- currentNamespace = stack.peek().getNamespace();
- writeValue(codec.serialize(value));
- separateNextSiblingsWithComma();
+ writeValue(value, codec);
}
@Override
public void startLeafSet(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startLeafSet(name);
-
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
- writeJsonIdentifier(name);
- writeStartList();
- indentRight();
+ context = new JSONStreamWriterListContext(context, name);
}
@Override
public void leafSetEntryNode(final Object value) throws IOException {
final LeafListSchemaNode schema = tracker.leafSetEntryNode();
- final Codec<Object, Object> codec = codecs.codecFor(schema.getType());
+ final JSONCodec<Object> codec = codecs.codecFor(schema.getType());
- separateElementFromPreviousElement();
- writeValue(codec.serialize(value));
- separateNextSiblingsWithComma();
+ context.emittingChild(codecs.getSchemaContext(), writer);
+
+ writeValue(value, codec);
}
+ /*
+ * Warning suppressed due to static final constant which triggers a warning
+ * for the call to schema.isPresenceContainer().
+ */
+ @SuppressWarnings("unused")
@Override
public void startContainerNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
- tracker.startContainerNode(name);
+ final SchemaNode schema = tracker.startContainerNode(name);
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
- writeJsonIdentifier(name);
- writeStartObject();
- indentRight();
+ // FIXME this code ignores presence for containers
+ // but datastore does as well and it needs be fixed first (2399)
+ context = new JSONStreamWriterNamedObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
}
@Override
public void startUnkeyedList(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startList(name);
-
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
- writeJsonIdentifier(name);
- writeStartList();
- indentRight();
+ context = new JSONStreamWriterListContext(context, name);
}
@Override
public void startUnkeyedListItem(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startListItem(name);
-
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.OBJECT, name.getNodeType().getNamespace()));
- writeStartObject();
- indentRight();
+ context = new JSONStreamWriterObjectContext(context, name, DEFAULT_EMIT_EMPTY_CONTAINERS);
}
@Override
public void startMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
tracker.startList(name);
-
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
- writeJsonIdentifier(name);
- writeStartList();
- indentRight();
+ context = new JSONStreamWriterListContext(context, name);
}
@Override
public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint)
throws IOException {
tracker.startListItem(identifier);
- separateElementFromPreviousElement();
- stack.push(new TypeInfo(NodeType.OBJECT, identifier.getNodeType().getNamespace()));
-
-
- writeStartObject();
- indentRight();
+ context = new JSONStreamWriterObjectContext(context, identifier, DEFAULT_EMIT_EMPTY_CONTAINERS);
}
@Override
public void startOrderedMapNode(final NodeIdentifier name, final int childSizeHint) throws IOException {
- tracker.startListItem(name);
-
- stack.push(new TypeInfo(NodeType.LIST, name.getNodeType().getNamespace()));
- separateElementFromPreviousElement();
- writeJsonIdentifier(name);
- writeStartList();
- indentRight();
+ tracker.startList(name);
+ context = new JSONStreamWriterListContext(context, name);
}
@Override
- public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) throws IllegalArgumentException {
+ public void startChoiceNode(final NodeIdentifier name, final int childSizeHint) {
tracker.startChoiceNode(name);
- handleInvisibleNode(name.getNodeType().getNamespace());
+ context = new JSONStreamWriterInvisibleContext(context);
}
@Override
- public void startAugmentationNode(final AugmentationIdentifier identifier) throws IllegalArgumentException {
+ public void startAugmentationNode(final AugmentationIdentifier identifier) {
tracker.startAugmentationNode(identifier);
- handleInvisibleNode(currentNamespace);
+ context = new JSONStreamWriterInvisibleContext(context);
}
@Override
public void anyxmlNode(final NodeIdentifier name, final Object value) throws IOException {
+ @SuppressWarnings("unused")
final AnyXmlSchemaNode schema = tracker.anyxmlNode(name);
// FIXME: should have a codec based on this :)
- separateElementFromPreviousElement();
- writeJsonIdentifier(name);
- currentNamespace = stack.peek().getNamespace();
- writeValue(value);
- separateNextSiblingsWithComma();
+ context.emittingChild(codecs.getSchemaContext(), writer);
+ context.writeChildJsonIdentifier(codecs.getSchemaContext(), writer, name.getNodeType());
+ writer.value(String.valueOf(value));
}
@Override
public void endNode() throws IOException {
tracker.endNode();
-
- final TypeInfo t = stack.pop();
- switch (t.getType()) {
- case LIST:
- indentLeft();
- newLine();
- writer.append(']');
- break;
- case OBJECT:
- indentLeft();
- newLine();
- writer.append('}');
- break;
- default:
- break;
- }
-
- currentNamespace = stack.isEmpty() ? null : stack.peek().getNamespace();
- separateNextSiblingsWithComma();
- }
-
- private void separateElementFromPreviousElement() throws IOException {
- if (!stack.isEmpty() && stack.peek().hasAtLeastOneChild()) {
- writer.append(',');
+ context = context.endNode(codecs.getSchemaContext(), writer);
+ if(context instanceof JSONStreamWriterRootContext) {
+ context.emitEnd(writer);
}
- newLine();
}
- private void newLine() throws IOException {
- if (indent != null) {
- writer.append('\n');
-
- for (int i = 0; i < currentDepth; i++) {
- writer.append(indent);
- }
- }
- }
-
- private void separateNextSiblingsWithComma() {
- if (!stack.isEmpty()) {
- stack.peek().setHasAtLeastOneChild(true);
- }
- }
-
- /**
- * Invisible nodes have to be also pushed to stack because of pairing of start*() and endNode() methods. Information
- * about child existing (due to printing comma) has to be transfered to invisible node.
- */
- private void handleInvisibleNode(final URI uri) {
- TypeInfo typeInfo = new TypeInfo(NodeType.OTHER, uri);
- typeInfo.setHasAtLeastOneChild(stack.peek().hasAtLeastOneChild());
- stack.push(typeInfo);
- }
-
- private void writeStartObject() throws IOException {
- writer.append('{');
- }
-
- private void writeStartList() throws IOException {
- writer.append('[');
- }
-
- private void writeModulName(final URI namespace) throws IOException {
- if (this.currentNamespace == null || namespace != this.currentNamespace) {
- Module module = schemaContext.findModuleByNamespaceAndRevision(namespace, null);
- writer.append(module.getName());
- writer.append(':');
- currentNamespace = namespace;
- }
- }
-
- private void writeValue(final Object value) throws IOException {
- final String str = String.valueOf(value);
-
- if (!NUMERIC_CLASSES.contains(value.getClass())) {
- writer.append('"');
- writer.append(str);
- writer.append('"');
- } else {
- writer.append(str);
- }
- }
-
- private void writeJsonIdentifier(final NodeIdentifier name) throws IOException {
- writer.append('"');
- writeModulName(name.getNodeType().getNamespace());
- writer.append(name.getNodeType().getLocalName());
- writer.append("\":");
- }
-
- private void indentRight() {
- currentDepth++;
- }
-
- private void indentLeft() {
- currentDepth--;
+ private void writeValue(Object value, JSONCodec<Object> codec)
+ throws IOException {
+ codec.serializeToWriter(writer,value);
}
@Override
writer.close();
}
+
+
}