X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;ds=sidebyside;f=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJsonParserStream.java;fp=yang%2Fyang-data-codec-gson%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fdata%2Fcodec%2Fgson%2FJsonParserStream.java;h=be140d6275fe8b6e4b98a631b28557cf03ce04f1;hb=50ba8458e9b8eb48cd47ecf8d0a0f6fb1d83d812;hp=3616de515b1408691c1f2f93c6ac00d2fa784bbc;hpb=4aecc9344dbf44952931c5b3ca23d34057658def;p=yangtools.git

diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java
index 3616de515b..be140d6275 100644
--- a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java
+++ b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java
@@ -55,6 +55,8 @@ import org.opendaylight.yangtools.yang.model.api.OperationDefinition;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Text;
@@ -67,16 +69,21 @@ import org.w3c.dom.Text;
 public final class JsonParserStream implements Closeable, Flushable {
     static final String ANYXML_ARRAY_ELEMENT_ID = "array-element";
 
+    private static final Logger LOG = LoggerFactory.getLogger(JsonParserStream.class);
     private final Deque<URI> namespaces = new ArrayDeque<>();
     private final NormalizedNodeStreamWriter writer;
     private final JSONCodecFactory codecs;
     private final DataSchemaNode parentNode;
 
+    // TODO: consider class specialization to remove this field
+    private final boolean lenient;
+
     private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
-            final DataSchemaNode parentNode) {
+            final DataSchemaNode parentNode, final boolean lenient) {
         this.writer = requireNonNull(writer);
         this.codecs = requireNonNull(codecs);
         this.parentNode = parentNode;
+        this.lenient = lenient;
     }
 
     /**
@@ -91,7 +98,7 @@ public final class JsonParserStream implements Closeable, Flushable {
      */
     public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
             final @NonNull JSONCodecFactory codecFactory) {
-        return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext());
+        return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext(), false);
     }
 
     /**
@@ -106,15 +113,7 @@ public final class JsonParserStream implements Closeable, Flushable {
      */
     public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
             final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
-        final DataSchemaNode parent;
-        if (parentNode instanceof DataSchemaNode) {
-            parent = (DataSchemaNode) parentNode;
-        } else if (parentNode instanceof OperationDefinition) {
-            parent = OperationAsContainer.of((OperationDefinition) parentNode);
-        } else {
-            throw new IllegalArgumentException("Illegal parent node " + requireNonNull(parentNode));
-        }
-        return new JsonParserStream(writer, codecFactory, parent);
+        return new JsonParserStream(writer, codecFactory, validateParent(parentNode), false);
     }
 
     /**
@@ -151,10 +150,52 @@ public final class JsonParserStream implements Closeable, Flushable {
                 parentNode);
     }
 
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+     * with the specified codec factory.
+     *
+     * <p>
+     * Returned parser will treat incoming JSON data leniently:
+     * <ul>
+     *   <li>JSON elements which do not map to a known construct will be silently ignored</li>
+     * </ul>
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory) {
+        return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext(), true);
+    }
+
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+     *
+     * <p>
+     * Returned parser will treat incoming JSON data leniently:
+     * <ul>
+     *   <li>JSON elements which do not map to a known construct will be silently ignored</li>
+     * </ul>
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @param parentNode Logical root node
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
+        return new JsonParserStream(writer, codecFactory, validateParent(parentNode), true);
+    }
+
     public JsonParserStream parse(final JsonReader reader) {
         // code copied from gson's JsonParser and Stream classes
-        final boolean lenient = reader.isLenient();
+        final boolean readerLenient = reader.isLenient();
         reader.setLenient(true);
         boolean isEmpty = true;
         try {
@@ -178,7 +219,7 @@
         } catch (StackOverflowError | OutOfMemoryError e) {
             throw new JsonParseException("Failed parsing JSON source: " + reader + " to Json", e);
         } finally {
-            reader.setLenient(lenient);
+            reader.setLenient(readerLenient);
         }
     }
 
@@ -280,7 +321,14 @@
                     }
                     final Entry<String, URI> namespaceAndName = resolveNamespace(jsonElementName, parentSchema);
                     final String localName = namespaceAndName.getKey();
-                    addNamespace(namespaceAndName.getValue());
+                    final URI namespace = namespaceAndName.getValue();
+                    if (lenient && (localName == null || namespace == null)) {
+                        LOG.debug("Schema node with name {} was not found under {}", localName,
+                            parentSchema.getQName());
+                        in.skipValue();
+                        continue;
+                    }
+                    addNamespace(namespace);
                     if (!namesakes.add(jsonElementName)) {
                         throw new JsonSyntaxException("Duplicate name " + jsonElementName + " in JSON input.");
                     }
@@ -375,7 +423,7 @@
         } else if (potentialUris.size() > 1) {
             throw new IllegalStateException("Choose suitable module name for element " + nodeNamePart + ":"
                     + toModuleNames(potentialUris));
-        } else if (potentialUris.isEmpty()) {
+        } else if (potentialUris.isEmpty() && !lenient) {
             throw new IllegalStateException("Schema node with name " + nodeNamePart + " was not found under "
                     + dataSchemaNode.getQName() + ".");
         }
@@ -419,6 +467,16 @@
         return namespaces.peek();
     }
 
+    private static DataSchemaNode validateParent(final SchemaNode parent) {
+        if (parent instanceof DataSchemaNode) {
+            return (DataSchemaNode) parent;
+        } else if (parent instanceof OperationDefinition) {
+            return OperationAsContainer.of((OperationDefinition) parent);
+        } else {
+            throw new IllegalArgumentException("Illegal parent node " + requireNonNull(parent));
+        }
+    }
+
     @Override
     public void flush() throws IOException {
         writer.flush();
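
Reviewer note: the following is a minimal usage sketch of the new createLenient() entry point, not part of this patch. The class name LenientParseExample is hypothetical, and the codec-factory acquisition via JSONCodecFactorySupplier.RFC7951.getShared() is an assumption that may differ between yangtools releases; only JsonParserStream.createLenient(), parse() and close() come from this change.

import java.io.IOException;
import java.io.StringReader;
import com.google.gson.stream.JsonReader;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactory;
import org.opendaylight.yangtools.yang.data.codec.gson.JSONCodecFactorySupplier;
import org.opendaylight.yangtools.yang.data.codec.gson.JsonParserStream;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.impl.schema.NormalizedNodeResult;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;

// Hypothetical example class, for illustration only.
final class LenientParseExample {
    private LenientParseExample() {
        // utility class
    }

    static NormalizedNode<?, ?> parseLeniently(final SchemaContext schemaContext, final String json)
            throws IOException {
        // Collect the parsed data into an immutable NormalizedNode tree.
        final NormalizedNodeResult result = new NormalizedNodeResult();
        final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
        // Codec-factory acquisition is an assumption; the supplier/constant may vary by release.
        final JSONCodecFactory codecFactory = JSONCodecFactorySupplier.RFC7951.getShared(schemaContext);

        // createLenient() mirrors create(): JSON elements which do not resolve to a schema node
        // are skipped (and logged at debug level) instead of failing the parse.
        try (JsonParserStream jsonParser = JsonParserStream.createLenient(streamWriter, codecFactory)) {
            jsonParser.parse(new JsonReader(new StringReader(json)));
        }
        return result.getResult();
    }
}

With a strict parser obtained from create(), the same input containing an unrecognized element would instead fail with IllegalStateException ("Schema node with name ... was not found under ..."), which is exactly the guard relaxed by the "potentialUris.isEmpty() && !lenient" change above.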