import java.net.URI;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayDeque;
-import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import javax.xml.transform.dom.DOMSource;
import org.eclipse.jdt.annotation.NonNull;
-import org.opendaylight.yangtools.odlext.model.api.YangModeledAnyXmlSchemaNode;
+import org.opendaylight.yangtools.odlext.model.api.YangModeledAnyxmlSchemaNode;
import org.opendaylight.yangtools.util.xml.UntrustedXML;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.util.AbstractNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.LeafNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.ListEntryNodeDataWithSchema;
import org.opendaylight.yangtools.yang.data.util.ListNodeDataWithSchema;
+import org.opendaylight.yangtools.yang.data.util.OperationAsContainer;
import org.opendaylight.yangtools.yang.data.util.ParserStreamUtils;
-import org.opendaylight.yangtools.yang.data.util.RpcAsContainer;
import org.opendaylight.yangtools.yang.data.util.SimpleNodeDataWithSchema;
import org.opendaylight.yangtools.yang.model.api.CaseSchemaNode;
import org.opendaylight.yangtools.yang.model.api.ChoiceSchemaNode;
import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.Module;
-import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.OperationDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Text;
// NOTE(review): this span is part of a unified-diff fragment; '+'-prefixed lines are additions
// from the patch and '-'-prefixed lines are removals. Resolve the diff before compiling.
public final class JsonParserStream implements Closeable, Flushable {
// Element name used to wrap anyxml JSON array entries when materializing DOM content.
static final String ANYXML_ARRAY_ELEMENT_ID = "array-element";
+ private static final Logger LOG = LoggerFactory.getLogger(JsonParserStream.class);
// Stack of namespaces in effect while descending into nested JSON objects.
private final Deque<URI> namespaces = new ArrayDeque<>();
// Sink which receives the stream of normalized nodes produced by this parser.
private final NormalizedNodeStreamWriter writer;
// Factory of codecs used to deserialize leaf values from their JSON representation.
private final JSONCodecFactory codecs;
// Logical root of the parse; presumably the SchemaContext root or a user-specified node.
private final DataSchemaNode parentNode;
+ // TODO: consider class specialization to remove this field
+ private final boolean lenient;
+
/**
 * Sole constructor, funneled through by all static factory methods.
 *
 * @param writer sink for normalized nodes produced during parsing, may not be null
 * @param codecs codec factory used to deserialize leaf values, may not be null
 * @param parentNode logical root of the parse, not null-checked here (factories guarantee it)
 * @param lenient true if JSON elements referring to unknown constructs should be silently skipped
 * @throws NullPointerException if writer or codecs is null
 */
private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
        final DataSchemaNode parentNode, final boolean lenient) {
    this.writer = requireNonNull(writer);
    this.codecs = requireNonNull(codecs);
    this.parentNode = parentNode;
    this.lenient = lenient;
}
/**
* @return A new {@link JsonParserStream}
* @throws NullPointerException if any of the arguments are null
*/
- public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
final @NonNull JSONCodecFactory codecFactory) {
- return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext());
+ return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext(), false);
}
/**
* @return A new {@link JsonParserStream}
* @throws NullPointerException if any of the arguments are null
*/
- public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
- if (parentNode instanceof RpcDefinition) {
- return new JsonParserStream(writer, codecFactory, new RpcAsContainer((RpcDefinition) parentNode));
- }
- checkArgument(parentNode instanceof DataSchemaNode, "An instance of DataSchemaNode is expected, %s supplied",
- parentNode);
- return new JsonParserStream(writer, codecFactory, (DataSchemaNode) parentNode);
+ return new JsonParserStream(writer, codecFactory, validateParent(parentNode), false);
}
/**
* Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
- * and {@link SchemaContext}. The stream will be logically rooted at the top of the supplied SchemaContext.
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+ * with the specified codec factory.
+ *
+ * <p>
+ * Returned parser will treat incoming JSON data leniently:
+ * <ul>
+ * <li>JSON elements referring to unknown constructs will be silently ignored</li>
+ * </ul>
*
* @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
- * @param schemaContext {@link SchemaContext} to use
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
* @return A new {@link JsonParserStream}
* @throws NullPointerException if any of the arguments are null
- *
- * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory)} instead.
*/
- @Deprecated
- public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
- final @NonNull SchemaContext schemaContext) {
- return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
+ public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory) {
+ return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext(), true);
}
/**
* Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
- * and {@link SchemaContext}. The stream will be logically rooted at the specified parent node.
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+ *
+ * <p>
+ * Returned parser will treat incoming JSON data leniently:
+ * <ul>
+ * <li>JSON elements referring to unknown constructs will be silently ignored</li>
+ * </ul>
*
* @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
- * @param schemaContext {@link SchemaContext} to use
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
* @param parentNode Logical root node
* @return A new {@link JsonParserStream}
* @throws NullPointerException if any of the arguments are null
- *
- * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory, SchemaNode)} instead.
*/
- @Deprecated
- public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
- final @NonNull SchemaContext schemaContext, final @NonNull SchemaNode parentNode) {
- return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext),
- parentNode);
+ public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
+ return new JsonParserStream(writer, codecFactory, validateParent(parentNode), true);
}
// Parse a single JSON document from the reader, emitting normalized nodes to this.writer.
// NOTE(review): diff-fragment — this hunk elides additional catch clauses and the method's
// return statement (declared return type is JsonParserStream; presumably 'return this;'
// for fluent chaining — TODO confirm against full source).
public JsonParserStream parse(final JsonReader reader) {
// code copied from gson's JsonParser and Stream classes
- final boolean lenient = reader.isLenient();
+ final boolean readerLenient = reader.isLenient();
// Force-lenient reading for the duration of the parse; restored in finally below.
reader.setLenient(true);
boolean isEmpty = true;
try {
reader.peek();
isEmpty = false;
- final CompositeNodeDataWithSchema compositeNodeDataWithSchema = new CompositeNodeDataWithSchema(parentNode);
+ final CompositeNodeDataWithSchema<?> compositeNodeDataWithSchema =
+ new CompositeNodeDataWithSchema<>(parentNode);
// Build the in-memory node tree, then replay it into the writer.
read(reader, compositeNodeDataWithSchema);
compositeNodeDataWithSchema.write(writer);
} catch (StackOverflowError | OutOfMemoryError e) {
// Deeply-nested/degenerate input can exhaust the stack or heap; surface as a parse failure.
throw new JsonParseException("Failed parsing JSON source: " + reader + " to Json", e);
} finally {
// Restore the caller's leniency setting regardless of outcome.
- reader.setLenient(lenient);
+ reader.setLenient(readerLenient);
}
}
parent.setValue(domSource);
}
// Recursive descent over one JSON value, populating 'parent' according to its schema.
// NOTE(review): diff-fragment — the switch below is elided; other JsonToken cases
// (BOOLEAN, NULL, BEGIN_ARRAY, BEGIN_OBJECT, ...) are outside this hunk.
- public void read(final JsonReader in, AbstractNodeDataWithSchema parent) throws IOException {
+ public void read(final JsonReader in, AbstractNodeDataWithSchema<?> parent) throws IOException {
switch (in.peek()) {
case STRING:
case NUMBER:
// Scalar under a leaf: recurse to consume it; scalar under a list: wrap in a new array entry.
if (parent instanceof LeafNodeDataWithSchema) {
read(in, parent);
} else {
- final AbstractNodeDataWithSchema newChild = newArrayEntry(parent);
+ final AbstractNodeDataWithSchema<?> newChild = newArrayEntry(parent);
read(in, newChild);
}
}
// Iterate the members of a JSON object, resolving each name against the parent schema.
// NOTE(review): diff-fragment — declarations of 'namesakes' and 'childDataSchemaNodes'
// (the result of findSchemaNodeByNameAndNamespace) are elided from this hunk.
while (in.hasNext()) {
final String jsonElementName = in.nextName();
DataSchemaNode parentSchema = parent.getSchema();
// yang-modeled anyxml: descend into the schema describing the anyxml payload.
- if (parentSchema instanceof YangModeledAnyXmlSchemaNode) {
- parentSchema = ((YangModeledAnyXmlSchemaNode) parentSchema).getSchemaOfAnyXmlData();
+ if (parentSchema instanceof YangModeledAnyxmlSchemaNode) {
+ parentSchema = ((YangModeledAnyxmlSchemaNode) parentSchema).getSchemaOfAnyXmlData();
}
// Split "module:localName" into local name plus resolved module namespace.
final Entry<String, URI> namespaceAndName = resolveNamespace(jsonElementName, parentSchema);
final String localName = namespaceAndName.getKey();
- addNamespace(namespaceAndName.getValue());
+ final URI namespace = namespaceAndName.getValue();
+ if (lenient && (localName == null || namespace == null)) {
+ LOG.debug("Schema node with name {} was not found under {}", localName,
+ parentSchema.getQName());
+ in.skipValue();
+ continue;
+ }
+ addNamespace(namespace);
// Reject duplicate member names within the same JSON object.
if (!namesakes.add(jsonElementName)) {
throw new JsonSyntaxException("Duplicate name " + jsonElementName + " in JSON input.");
}
ParserStreamUtils.findSchemaNodeByNameAndNamespace(parentSchema, localName,
getCurrentNamespace());
checkState(!childDataSchemaNodes.isEmpty(),
- "Schema for node with name %s and namespace %s does not exist.", localName,
- getCurrentNamespace());
+ "Schema for node with name %s and namespace %s does not exist at %s",
+ localName, getCurrentNamespace(), parentSchema.getPath());
+
- final AbstractNodeDataWithSchema newChild = ((CompositeNodeDataWithSchema) parent)
+ final AbstractNodeDataWithSchema<?> newChild = ((CompositeNodeDataWithSchema<?>) parent)
.addChild(childDataSchemaNodes);
// anyxml children are consumed as opaque DOM content rather than recursed into.
if (newChild instanceof AnyXmlNodeDataWithSchema) {
readAnyXmlValue(in, (AnyXmlNodeDataWithSchema) newChild, jsonElementName);
}
}
- private static boolean isArray(final AbstractNodeDataWithSchema parent) {
+ private static boolean isArray(final AbstractNodeDataWithSchema<?> parent) {
return parent instanceof ListNodeDataWithSchema || parent instanceof LeafListNodeDataWithSchema;
}
- private static AbstractNodeDataWithSchema newArrayEntry(final AbstractNodeDataWithSchema parent) {
- AbstractNodeDataWithSchema newChild;
+ private static AbstractNodeDataWithSchema<?> newArrayEntry(final AbstractNodeDataWithSchema<?> parent) {
+ AbstractNodeDataWithSchema<?> newChild;
if (parent instanceof ListNodeDataWithSchema) {
- newChild = new ListEntryNodeDataWithSchema(parent.getSchema());
+ newChild = ListEntryNodeDataWithSchema.forSchema(((ListNodeDataWithSchema) parent).getSchema());
} else if (parent instanceof LeafListNodeDataWithSchema) {
- newChild = new LeafListEntryNodeDataWithSchema(parent.getSchema());
+ newChild = new LeafListEntryNodeDataWithSchema(((LeafListNodeDataWithSchema) parent).getSchema());
} else {
throw new IllegalStateException("Found an unexpected array nested under " + parent.getSchema().getQName());
}
- ((CompositeNodeDataWithSchema) parent).addChild(newChild);
+ ((CompositeNodeDataWithSchema<?>) parent).addChild(newChild);
return newChild;
}
// Validate and record a scalar value on a simple (leaf/leaf-list entry) node.
// NOTE(review): diff-fragment — the lines which actually translate 'value' via the codec
// and assign it to parentSimpleNode are elided from this hunk; as shown, 'value' is only
// referenced implicitly. TODO confirm against full source.
- private void setValue(final AbstractNodeDataWithSchema parent, final String value) {
+ private void setValue(final AbstractNodeDataWithSchema<?> parent, final String value) {
// Only simple nodes can carry a scalar value.
checkArgument(parent instanceof SimpleNodeDataWithSchema, "Node %s is not a simple type",
parent.getSchema().getQName());
- final SimpleNodeDataWithSchema parentSimpleNode = (SimpleNodeDataWithSchema) parent;
+ final SimpleNodeDataWithSchema<?> parentSimpleNode = (SimpleNodeDataWithSchema<?>) parent;
// Reject double assignment: a value may be set at most once per node.
checkArgument(parentSimpleNode.getValue() == null, "Node '%s' has already set its value to '%s'",
parentSimpleNode.getSchema().getQName(), parentSimpleNode.getValue());
}
// NOTE(review): diff-fragment — interior of resolveNamespace(); the method header, the
// parsing of the "module:localName" element name and the construction of the returned
// entry are elided. 'namespace', 'nodeNamePart' and 'dataSchemaNode' are declared above
// this hunk.
if (namespace == null) {
// No explicit module prefix: infer the namespace from candidate modules.
- Set<URI> potentialUris = Collections.emptySet();
- potentialUris = resolveAllPotentialNamespaces(nodeNamePart, dataSchemaNode);
+ final Set<URI> potentialUris = resolveAllPotentialNamespaces(nodeNamePart, dataSchemaNode);
// Prefer the namespace already in effect; otherwise a single candidate wins,
// multiple candidates are ambiguous, and zero candidates is an error unless lenient.
if (potentialUris.contains(getCurrentNamespace())) {
namespace = getCurrentNamespace();
} else if (potentialUris.size() == 1) {
} else if (potentialUris.size() > 1) {
throw new IllegalStateException("Choose suitable module name for element " + nodeNamePart + ":"
+ toModuleNames(potentialUris));
- } else if (potentialUris.isEmpty()) {
+ } else if (potentialUris.isEmpty() && !lenient) {
throw new IllegalStateException("Schema node with name " + nodeNamePart + " was not found under "
+ dataSchemaNode.getQName() + ".");
}
return namespaces.peek();
}
+ private static DataSchemaNode validateParent(final SchemaNode parent) {
+ if (parent instanceof DataSchemaNode) {
+ return (DataSchemaNode) parent;
+ } else if (parent instanceof OperationDefinition) {
+ return OperationAsContainer.of((OperationDefinition) parent);
+ } else {
+ throw new IllegalArgumentException("Illegal parent node " + requireNonNull(parent));
+ }
+ }
+
@Override
public void flush() throws IOException {
writer.flush();