import java.io.EOFException;
import java.io.Flushable;
import java.io.IOException;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.transform.dom.DOMSource;
import org.eclipse.jdt.annotation.NonNull;
import org.opendaylight.yangtools.util.xml.UntrustedXML;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.XMLNamespace;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.util.AbstractNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.AnyXmlNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.CompositeNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.CompositeNodeDataWithSchema.ChildReusePolicy;
import org.opendaylight.yangtools.yang.data.util.LeafListNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.LeafNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.ListNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.MultipleEntryDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.OperationAsContainer;
import org.opendaylight.yangtools.yang.data.util.ParserStreamUtils;
import org.opendaylight.yangtools.yang.data.util.SimpleNodeDataWithSchema;
import org.opendaylight.yangtools.yang.model.api.CaseSchemaNode;
import org.opendaylight.yangtools.yang.model.api.ChoiceSchemaNode;
import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.OperationDefinition;
import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.meta.EffectiveStatement;
import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Text;
// NOTE(review): this chunk is an unapplied unified diff; lines beginning with '-' are
// recorded removals and lines beginning with '+' are additions. Comments below describe
// the intended post-patch state.
public final class JsonParserStream implements Closeable, Flushable {
// Synthetic element name used when representing anyxml JSON arrays as XML.
static final String ANYXML_ARRAY_ELEMENT_ID = "array-element";
- private final Deque<URI> namespaces = new ArrayDeque<>();
+ private static final Logger LOG = LoggerFactory.getLogger(JsonParserStream.class);
// Stack of namespaces corresponding to the JSON object nesting currently being parsed.
+ private final Deque<XMLNamespace> namespaces = new ArrayDeque<>();
private final NormalizedNodeStreamWriter writer;
private final JSONCodecFactory codecs;
- private final SchemaContext schema;
// Logical root of the parse; initialized from the inference stack in the constructor.
private final DataSchemaNode parentNode;
- private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final JSONCodecFactory codecs, final DataSchemaNode parentNode) {
- this.schema = requireNonNull(schemaContext);
+ private final SchemaInferenceStack stack;
+
+ // TODO: consider class specialization to remove this field
+ private final boolean lenient;
+
+ private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
+ final SchemaInferenceStack stack, final boolean lenient) {
this.writer = requireNonNull(writer);
this.codecs = requireNonNull(codecs);
- this.parentNode = parentNode;
+ this.stack = requireNonNull(stack);
+ this.lenient = lenient;
+
+ if (!stack.isEmpty()) {
+ final EffectiveStatement<?, ?> parent = stack.currentStatement();
+ if (parent instanceof DataSchemaNode) {
+ parentNode = (DataSchemaNode) parent;
+ } else if (parent instanceof OperationDefinition) {
+ parentNode = OperationAsContainer.of((OperationDefinition) parent);
+ } else {
+ throw new IllegalArgumentException("Illegal parent node " + parent);
+ }
+ } else {
+ parentNode = stack.getEffectiveModelContext();
+ }
}
- private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final DataSchemaNode parentNode) {
- this(writer, schemaContext, JSONCodecFactory.getShared(schemaContext), parentNode);
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+ * with the specified codec factory.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory) {
+ return new JsonParserStream(writer, codecFactory,
+ SchemaInferenceStack.of(codecFactory.getEffectiveModelContext()), false);
}
- public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final SchemaNode parentNode) {
- if (parentNode instanceof RpcDefinition) {
- return new JsonParserStream(writer, schemaContext, new RpcAsContainer((RpcDefinition) parentNode));
- }
- checkArgument(parentNode instanceof DataSchemaNode, "Instance of DataSchemaNode class awaited.");
- return new JsonParserStream(writer, schemaContext, (DataSchemaNode) parentNode);
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @param parentNode Logical root node
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull EffectiveStatementInference parentNode) {
+ return new JsonParserStream(writer, codecFactory, SchemaInferenceStack.ofInference(parentNode), false);
}
- public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext) {
- return new JsonParserStream(writer, schemaContext, schemaContext);
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+ * with the specified codec factory.
+ *
+ * <p>
+ * Returned parser will treat incoming JSON data leniently:
+ * <ul>
+ * <li>JSON elements referring to unknown constructs will be silently ignored</li>
+ * </ul>
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory) {
+ return new JsonParserStream(writer, codecFactory,
+ SchemaInferenceStack.of(codecFactory.getEffectiveModelContext()), true);
+ }
+
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+ *
+ * <p>
+ * Returned parser will treat incoming JSON data leniently:
+ * <ul>
+ * <li>JSON elements referring to unknown constructs will be silently ignored</li>
+ * </ul>
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @param parentNode Logical root node
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull EffectiveStatementInference parentNode) {
+ return new JsonParserStream(writer, codecFactory, SchemaInferenceStack.ofInference(parentNode), true);
}
// Parse the whole JSON document from the reader and emit it to the configured writer.
// NOTE(review): this diff hunk elides the intermediate catch clauses and the trailing
// return statement of the method — do not treat this span as complete.
public JsonParserStream parse(final JsonReader reader) {
// code copied from gson's JsonParser and Stream classes
// The local is renamed to avoid shadowing the new 'lenient' instance field.
- final boolean lenient = reader.isLenient();
+ final boolean readerLenient = reader.isLenient();
reader.setLenient(true);
boolean isEmpty = true;
try {
reader.peek();
isEmpty = false;
- final CompositeNodeDataWithSchema compositeNodeDataWithSchema = new CompositeNodeDataWithSchema(parentNode);
+ final CompositeNodeDataWithSchema<?> compositeNodeDataWithSchema =
+ new CompositeNodeDataWithSchema<>(parentNode);
read(reader, compositeNodeDataWithSchema);
compositeNodeDataWithSchema.write(writer);
} catch (StackOverflowError | OutOfMemoryError e) {
throw new JsonParseException("Failed parsing JSON source: " + reader + " to Json", e);
} finally {
// Always restore the reader's original leniency setting.
- reader.setLenient(lenient);
+ reader.setLenient(readerLenient);
}
}
parent.setValue(domSource);
}
// Recursively read one JSON value into the node tree rooted at 'parent'.
// NOTE(review): this diff hunk elides several statements (switch case bodies, breaks,
// namesakes/childDataSchemaNodes declarations) — the span is not complete Java.
- public void read(final JsonReader in, AbstractNodeDataWithSchema parent) throws IOException {
+ public void read(final JsonReader in, AbstractNodeDataWithSchema<?> parent) throws IOException {
switch (in.peek()) {
case STRING:
case NUMBER:
if (parent instanceof LeafNodeDataWithSchema) {
read(in, parent);
} else {
// Scalar inside an array context: allocate a new list/leaf-list entry first.
- final AbstractNodeDataWithSchema newChild = newArrayEntry(parent);
+ final AbstractNodeDataWithSchema<?> newChild = newArrayEntry(parent);
read(in, newChild);
}
}
}
while (in.hasNext()) {
final String jsonElementName = in.nextName();
- DataSchemaNode parentSchema = parent.getSchema();
- if (parentSchema instanceof YangModeledAnyXmlSchemaNode) {
- parentSchema = ((YangModeledAnyXmlSchemaNode) parentSchema).getSchemaOfAnyXmlData();
- }
- final Entry<String, URI> namespaceAndName = resolveNamespace(jsonElementName, parentSchema);
+ final DataSchemaNode parentSchema = parent.getSchema();
+ final Entry<String, XMLNamespace> namespaceAndName =
+ resolveNamespace(jsonElementName, parentSchema);
final String localName = namespaceAndName.getKey();
- addNamespace(namespaceAndName.getValue());
+ final XMLNamespace namespace = namespaceAndName.getValue();
// Lenient mode: unknown elements are logged at debug and skipped, not fatal.
+ if (lenient && (localName == null || namespace == null)) {
+ LOG.debug("Schema node with name {} was not found under {}", localName,
+ parentSchema.getQName());
+ in.skipValue();
+ continue;
+ }
+ addNamespace(namespace);
// Reject duplicate member names within the same JSON object.
if (!namesakes.add(jsonElementName)) {
throw new JsonSyntaxException("Duplicate name " + jsonElementName + " in JSON input.");
}
ParserStreamUtils.findSchemaNodeByNameAndNamespace(parentSchema, localName,
getCurrentNamespace());
checkState(!childDataSchemaNodes.isEmpty(),
- "Schema for node with name %s and namespace %s does not exist.", localName,
- getCurrentNamespace());
+ "Schema for node with name %s and namespace %s does not exist at %s",
+ localName, getCurrentNamespace(), parentSchema);
+ final QName qname = childDataSchemaNodes.peekLast().getQName();
- final AbstractNodeDataWithSchema newChild = ((CompositeNodeDataWithSchema) parent)
- .addChild(childDataSchemaNodes);
+ final AbstractNodeDataWithSchema<?> newChild = ((CompositeNodeDataWithSchema<?>) parent)
+ .addChild(childDataSchemaNodes, ChildReusePolicy.NOOP);
if (newChild instanceof AnyXmlNodeDataWithSchema) {
readAnyXmlValue(in, (AnyXmlNodeDataWithSchema) newChild, jsonElementName);
} else {
// Keep the inference stack in sync with the data tree while descending.
+ stack.enterDataTree(qname);
read(in, newChild);
+ stack.exit();
}
removeNamespace();
}
}
}
- private static boolean isArray(final AbstractNodeDataWithSchema parent) {
+ private static boolean isArray(final AbstractNodeDataWithSchema<?> parent) {
return parent instanceof ListNodeDataWithSchema || parent instanceof LeafListNodeDataWithSchema;
}
- private static AbstractNodeDataWithSchema newArrayEntry(final AbstractNodeDataWithSchema parent) {
- AbstractNodeDataWithSchema newChild;
- if (parent instanceof ListNodeDataWithSchema) {
- newChild = new ListEntryNodeDataWithSchema(parent.getSchema());
- } else if (parent instanceof LeafListNodeDataWithSchema) {
- newChild = new LeafListEntryNodeDataWithSchema(parent.getSchema());
- } else {
+ private static AbstractNodeDataWithSchema<?> newArrayEntry(final AbstractNodeDataWithSchema<?> parent) {
+ if (!(parent instanceof MultipleEntryDataWithSchema)) {
throw new IllegalStateException("Found an unexpected array nested under " + parent.getSchema().getQName());
}
- ((CompositeNodeDataWithSchema) parent).addChild(newChild);
- return newChild;
+ return ((MultipleEntryDataWithSchema<?>) parent).newChildEntry();
}
// Assign a parsed string value to a simple (leaf-like) node, rejecting duplicates.
// NOTE(review): the tail of this method (the actual value assignment) is elided by the diff hunk.
- private void setValue(final AbstractNodeDataWithSchema parent, final String value) {
+ private void setValue(final AbstractNodeDataWithSchema<?> parent, final String value) {
checkArgument(parent instanceof SimpleNodeDataWithSchema, "Node %s is not a simple type",
parent.getSchema().getQName());
- final SimpleNodeDataWithSchema parentSimpleNode = (SimpleNodeDataWithSchema) parent;
+ final SimpleNodeDataWithSchema<?> parentSimpleNode = (SimpleNodeDataWithSchema<?>) parent;
// A simple node may carry at most one value; a second assignment is a JSON input error.
checkArgument(parentSimpleNode.getValue() == null, "Node '%s' has already set its value to '%s'",
parentSimpleNode.getSchema().getQName(), parentSimpleNode.getValue());
private Object translateValueByType(final String value, final DataSchemaNode node) {
checkArgument(node instanceof TypedDataSchemaNode);
- return codecs.codecFor((TypedDataSchemaNode) node).parseValue(null, value);
+ return codecs.codecFor((TypedDataSchemaNode) node, stack).parseValue(null, value);
}
// Pop the namespace pushed for the JSON object we have just finished reading.
private void removeNamespace() {
namespaces.pop();
}
- private void addNamespace(final URI namespace) {
+ private void addNamespace(final XMLNamespace namespace) {
namespaces.push(namespace);
}
// Split a JSON member name of the form "module:localName" and resolve its namespace,
// falling back to schema-based lookup when no module prefix is present.
// NOTE(review): the body of the 'size() == 1' branch (assigning the single candidate
// namespace) appears elided by this diff hunk — confirm against the full file.
- private Entry<String, URI> resolveNamespace(final String childName, final DataSchemaNode dataSchemaNode) {
+ private Entry<String, XMLNamespace> resolveNamespace(final String childName, final DataSchemaNode dataSchemaNode) {
final int lastIndexOfColon = childName.lastIndexOf(':');
String moduleNamePart = null;
String nodeNamePart = null;
- URI namespace = null;
+ XMLNamespace namespace = null;
if (lastIndexOfColon != -1) {
// Explicit "module:name" form: look the module up by name.
moduleNamePart = childName.substring(0, lastIndexOfColon);
nodeNamePart = childName.substring(lastIndexOfColon + 1);
- final Iterator<Module> m = schema.findModules(moduleNamePart).iterator();
+ final Iterator<? extends Module> m = codecs.getEffectiveModelContext().findModules(moduleNamePart)
+ .iterator();
namespace = m.hasNext() ? m.next().getNamespace() : null;
} else {
nodeNamePart = childName;
}
if (namespace == null) {
- Set<URI> potentialUris = Collections.emptySet();
- potentialUris = resolveAllPotentialNamespaces(nodeNamePart, dataSchemaNode);
+ final Set<XMLNamespace> potentialUris = resolveAllPotentialNamespaces(nodeNamePart, dataSchemaNode);
if (potentialUris.contains(getCurrentNamespace())) {
namespace = getCurrentNamespace();
} else if (potentialUris.size() == 1) {
} else if (potentialUris.size() > 1) {
// Ambiguous: require the caller to qualify the element with a module name.
throw new IllegalStateException("Choose suitable module name for element " + nodeNamePart + ":"
+ toModuleNames(potentialUris));
// In lenient mode an unknown element is tolerated here and skipped by the caller.
- } else if (potentialUris.isEmpty()) {
+ } else if (potentialUris.isEmpty() && !lenient) {
throw new IllegalStateException("Schema node with name " + nodeNamePart + " was not found under "
+ dataSchemaNode.getQName() + ".");
}
return new SimpleImmutableEntry<>(nodeNamePart, namespace);
}
- private String toModuleNames(final Set<URI> potentialUris) {
+ private String toModuleNames(final Set<XMLNamespace> potentialUris) {
final StringBuilder builder = new StringBuilder();
- for (final URI potentialUri : potentialUris) {
+ for (final XMLNamespace potentialUri : potentialUris) {
builder.append('\n');
//FIXME how to get information about revision from JSON input? currently first available is used.
- builder.append(schema.findModules(potentialUri).iterator().next().getName());
+ builder.append(codecs.getEffectiveModelContext().findModules(potentialUri).iterator().next().getName());
}
return builder.toString();
}
// Collect every namespace under which a child named 'elementName' could live beneath
// 'dataSchemaNode', descending transparently through choice/case nodes.
// NOTE(review): the body of the child-node for-loop is elided by this diff hunk —
// the span is not complete Java.
- private Set<URI> resolveAllPotentialNamespaces(final String elementName, final DataSchemaNode dataSchemaNode) {
- final Set<URI> potentialUris = new HashSet<>();
+ private Set<XMLNamespace> resolveAllPotentialNamespaces(final String elementName,
+ final DataSchemaNode dataSchemaNode) {
+ final Set<XMLNamespace> potentialUris = new HashSet<>();
final Set<ChoiceSchemaNode> choices = new HashSet<>();
if (dataSchemaNode instanceof DataNodeContainer) {
for (final DataSchemaNode childSchemaNode : ((DataNodeContainer) dataSchemaNode).getChildNodes()) {
}
// Choice nodes are transparent in JSON: recurse into every case.
for (final ChoiceSchemaNode choiceNode : choices) {
- for (final ChoiceCaseNode concreteCase : choiceNode.getCases().values()) {
+ for (final CaseSchemaNode concreteCase : choiceNode.getCases()) {
potentialUris.addAll(resolveAllPotentialNamespaces(elementName, concreteCase));
}
}
return potentialUris;
}
- private URI getCurrentNamespace() {
+ private XMLNamespace getCurrentNamespace() {
return namespaces.peek();
}